1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
34 /* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
35 acceptable form for is_gimple_min_invariant.
   NOTE(review): this view of the file is elided (the return type line and
   several statements, including the returns, are missing) -- verify against
   the upstream gimple-fold.c before editing.  */
38 canonicalize_constructor_val (tree cval)
41 if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
/* Fold a POINTER_PLUS_EXPR initializer back into an address expression,
   which is the form is_gimple_min_invariant can accept.  */
43 tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
44 TREE_OPERAND (cval, 0),
45 TREE_OPERAND (cval, 1),
50 if (TREE_CODE (cval) == ADDR_EXPR)
52 tree base = get_base_address (TREE_OPERAND (cval, 0));
/* Taking the address of a VAR_DECL makes it referenced; register it so the
   referenced-vars machinery stays consistent.  */
53 if (base && TREE_CODE (base) == VAR_DECL)
54 add_referenced_var (base);
59 /* If SYM is a constant variable with known value, return the value.
60 NULL_TREE is returned otherwise.
   NOTE(review): interior lines are elided in this view (braces, the
   NULL_TREE fall-through returns) -- confirm against upstream.  */
63 get_symbol_constant_value (tree sym)
/* Only static or external symbols can have a link-time-known value; for
   non-CONST_DECLs additionally require the varpool to have proven the
   initializer can't change (const_value_known).  */
65 if ((TREE_STATIC (sym) || DECL_EXTERNAL (sym))
66 && (TREE_CODE (sym) == CONST_DECL
67 || varpool_get_node (sym)->const_value_known))
69 tree val = DECL_INITIAL (sym);
72 val = canonicalize_constructor_val (val);
73 if (is_gimple_min_invariant (val))
76 /* Variables declared 'const' without an initializer
77 have zero as the initializer if they may not be
78 overridden at link or run time. */
80 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
81 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
82 return fold_convert (TREE_TYPE (sym), integer_zero_node);
89 /* Return true if we may propagate the address expression ADDR into the
90 dereference DEREF and cancel them.
   DEREF must be a MEM_REF and ADDR an ADDR_EXPR (asserted below).
   NOTE(review): some interior lines (returns after the guards) are elided
   in this view.  */
93 may_propagate_address_into_dereference (tree addr, tree deref)
95 gcc_assert (TREE_CODE (deref) == MEM_REF
96 && TREE_CODE (addr) == ADDR_EXPR);
98 /* Don't propagate if ADDR's operand has incomplete type. */
99 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
102 /* If the address is invariant then we do not need to preserve restrict
103 qualifications. But we do need to preserve volatile qualifiers until
104 we can annotate the folded dereference itself properly. */
105 if (is_gimple_min_invariant (addr)
106 && (!TREE_THIS_VOLATILE (deref)
107 || TYPE_VOLATILE (TREE_TYPE (addr))))
108 return useless_type_conversion_p (TREE_TYPE (deref),
109 TREE_TYPE (TREE_OPERAND (addr, 0)));
111 /* Else both the address substitution and the folding must result in
112 a valid useless type conversion sequence. */
113 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
115 && useless_type_conversion_p (TREE_TYPE (deref),
116 TREE_TYPE (TREE_OPERAND (addr, 0))));
120 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
121 BASE is an array type. OFFSET is a byte displacement.
123 LOC is the location of the original expression.
    Returns the folded ARRAY_REF, or (presumably) NULL_TREE on failure --
    the failure returns are elided in this view of the file.  */
126 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
128 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
129 tree array_type, elt_type, elt_size;
132 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
133 measured in units of the size of elements type) from that ARRAY_REF).
134 We can't do anything if either is variable.
136 The case we handle here is *(&A[N]+O). */
137 if (TREE_CODE (base) == ARRAY_REF)
139 tree low_bound = array_ref_low_bound (base);
141 elt_offset = TREE_OPERAND (base, 1);
142 if (TREE_CODE (low_bound) != INTEGER_CST
143 || TREE_CODE (elt_offset) != INTEGER_CST)
/* Rebase the element offset so it is relative to index zero.  */
146 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
147 base = TREE_OPERAND (base, 0);
150 /* Ignore stupid user tricks of indexing non-array variables. */
151 array_type = TREE_TYPE (base);
152 if (TREE_CODE (array_type) != ARRAY_TYPE)
154 elt_type = TREE_TYPE (array_type);
156 /* Use signed size type for intermediate computation on the index. */
157 idx_type = ssizetype;
159 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
160 element type (so we can use the alignment if it's not constant).
161 Otherwise, compute the offset as an index by using a division. If the
162 division isn't exact, then don't do anything. */
163 elt_size = TYPE_SIZE_UNIT (elt_type);
166 if (integer_zerop (offset))
168 if (TREE_CODE (elt_size) != INTEGER_CST)
169 elt_size = size_int (TYPE_ALIGN (elt_type));
171 idx = build_int_cst (idx_type, 0);
175 unsigned HOST_WIDE_INT lquo, lrem;
176 HOST_WIDE_INT hquo, hrem;
179 /* The final array offset should be signed, so we need
180 to sign-extend the (possibly pointer) offset here
181 and use signed division. */
182 soffset = double_int_sext (tree_to_double_int (offset),
183 TYPE_PRECISION (TREE_TYPE (offset)));
/* Bail out unless the byte offset divides exactly by the element size
   (the remainder check on lrem/hrem is in an elided line).  */
184 if (TREE_CODE (elt_size) != INTEGER_CST
185 || div_and_round_double (TRUNC_DIV_EXPR, 0,
186 soffset.low, soffset.high,
187 TREE_INT_CST_LOW (elt_size),
188 TREE_INT_CST_HIGH (elt_size),
189 &lquo, &hquo, &lrem, &hrem)
193 idx = build_int_cst_wide (idx_type, lquo, hquo);
196 /* Assume the low bound is zero. If there is a domain type, get the
197 low bound, if any, convert the index into that type, and add the
199 min_idx = build_int_cst (idx_type, 0);
200 domain_type = TYPE_DOMAIN (array_type);
203 idx_type = domain_type;
204 if (TYPE_MIN_VALUE (idx_type))
205 min_idx = TYPE_MIN_VALUE (idx_type);
207 min_idx = fold_convert (idx_type, min_idx);
209 if (TREE_CODE (min_idx) != INTEGER_CST)
212 elt_offset = fold_convert (idx_type, elt_offset);
215 if (!integer_zerop (min_idx))
216 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
217 if (!integer_zerop (elt_offset))
218 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
220 /* Make sure to possibly truncate late after offsetting. */
221 idx = fold_convert (idx_type, idx);
223 /* We don't want to construct access past array bounds. For example
226 should not be simplified into (*c)[14] or tree-vrp will
228 This is only an issue for multi-dimensional arrays. */
229 if (TREE_CODE (elt_type) == ARRAY_TYPE
232 if (TYPE_MAX_VALUE (domain_type)
233 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
234 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
236 else if (TYPE_MIN_VALUE (domain_type)
237 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
238 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
240 else if (compare_tree_int (idx, 0) < 0)
/* All checks passed: build the ARRAY_REF replacing *(A+O).  */
245 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
246 SET_EXPR_LOCATION (t, loc);
252 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
253 LOC is the location of original expression.
255 Before attempting the conversion strip off existing ADDR_EXPRs.
    NOTE(review): return statements are elided in this view; presumably
    NULL_TREE on failure -- confirm upstream.  */
258 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
/* Only an ADDR_EXPR base can be peeled to an object we may index.  */
264 if (TREE_CODE (base) != ADDR_EXPR)
267 base = TREE_OPERAND (base, 0);
/* Trivial case: same type at offset zero needs no array-ref at all.  */
268 if (types_compatible_p (orig_type, TREE_TYPE (base))
269 && integer_zerop (offset))
272 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
/* Accept the folded form only when its type matches the original.  */
273 if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
278 /* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
279 LOC is the location of the original expression.
    NOTE(review): failure returns are elided in this view of the file.  */
282 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
288 if (TREE_CODE (addr) != ADDR_EXPR)
290 base = TREE_OPERAND (addr, 0);
291 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
/* Re-take the address of the array element and require the result type
   to convert uselessly to ORIG_TYPE, else the fold is invalid.  */
294 ret = build_fold_addr_expr (ret);
295 if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
297 SET_EXPR_LOCATION (ret, loc);
304 /* A quaint feature extant in our address arithmetic is that there
305 can be hidden type changes here. The type of the result need
306 not be the same as the type of the input pointer.
308 What we're after here is an expression of the form
309 (T *)(&array + const)
310 where array is OP0, const is OP1, RES_TYPE is T and
311 the cast doesn't actually exist, but is implicit in the
312 type of the POINTER_PLUS_EXPR. We'd like to turn this into
314 which may be able to propagate further.
    NOTE(review): this view is elided (failure returns and some conditions
    are missing); verify against upstream gimple-fold.c.  */
317 maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
322 /* The first operand should be an ADDR_EXPR. */
323 if (TREE_CODE (op0) != ADDR_EXPR)
325 op0 = TREE_OPERAND (op0, 0);
327 /* It had better be a constant. */
328 if (TREE_CODE (op1) != INTEGER_CST)
330 /* Or op0 should now be A[0] and the non-constant offset defined
331 via a multiplication by the array element size. */
332 if (TREE_CODE (op0) == ARRAY_REF
333 /* As we will end up creating a variable index array access
334 in the outermost array dimension make sure there isn't
335 a more inner array that the index could overflow to. */
336 && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
337 && integer_zerop (TREE_OPERAND (op0, 1))
338 && TREE_CODE (op1) == SSA_NAME)
340 gimple offset_def = SSA_NAME_DEF_STMT (op1);
341 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
342 if (!host_integerp (elsz, 1)
343 || !is_gimple_assign (offset_def))
346 /* Do not build array references of something that we can't
347 see the true number of array dimensions for. */
348 if (!DECL_P (TREE_OPERAND (op0, 0))
349 && !handled_component_p (TREE_OPERAND (op0, 0)))
/* Case 1: offset is index * element-size -- use the index directly.  */
352 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
353 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
354 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
355 return build_fold_addr_expr
356 (build4 (ARRAY_REF, TREE_TYPE (op0),
357 TREE_OPERAND (op0, 0),
358 gimple_assign_rhs1 (offset_def),
359 TREE_OPERAND (op0, 2),
360 TREE_OPERAND (op0, 3)));
/* Case 2: one-byte elements -- the offset is the index itself.  */
361 else if (integer_onep (elsz)
362 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
363 return build_fold_addr_expr
364 (build4 (ARRAY_REF, TREE_TYPE (op0),
365 TREE_OPERAND (op0, 0),
367 TREE_OPERAND (op0, 2),
368 TREE_OPERAND (op0, 3)));
/* Same two cases, but for &array (not &array[0]) with a variable
   offset; only valid when elements are not themselves arrays.  */
370 else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
372 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
373 && TREE_CODE (op1) == SSA_NAME)
375 gimple offset_def = SSA_NAME_DEF_STMT (op1);
376 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
377 if (!host_integerp (elsz, 1)
378 || !is_gimple_assign (offset_def))
381 /* Do not build array references of something that we can't
382 see the true number of array dimensions for. */
384 && !handled_component_p (op0))
387 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
388 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
389 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
390 return build_fold_addr_expr
391 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
392 op0, gimple_assign_rhs1 (offset_def),
393 integer_zero_node, NULL_TREE));
394 else if (integer_onep (elsz)
395 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
396 return build_fold_addr_expr
397 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
399 integer_zero_node, NULL_TREE));
405 /* If the first operand is an ARRAY_REF, expand it so that we can fold
406 the offset into it. */
407 while (TREE_CODE (op0) == ARRAY_REF)
409 tree array_obj = TREE_OPERAND (op0, 0);
410 tree array_idx = TREE_OPERAND (op0, 1);
411 tree elt_type = TREE_TYPE (op0);
412 tree elt_size = TYPE_SIZE_UNIT (elt_type);
/* Both index and element size must be constants to fold further.  */
415 if (TREE_CODE (array_idx) != INTEGER_CST)
417 if (TREE_CODE (elt_size) != INTEGER_CST)
420 /* Un-bias the index by the min index of the array type. */
421 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
424 min_idx = TYPE_MIN_VALUE (min_idx);
427 if (TREE_CODE (min_idx) != INTEGER_CST)
430 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
431 if (!integer_zerop (min_idx))
432 array_idx = int_const_binop (MINUS_EXPR, array_idx,
437 /* Convert the index to a byte offset. */
438 array_idx = fold_convert (sizetype, array_idx);
439 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
441 /* Update the operands for the next round, or for folding. */
442 op1 = int_const_binop (PLUS_EXPR,
447 ptd_type = TREE_TYPE (res_type);
448 /* If we want a pointer to void, reconstruct the reference from the
449 array element type. A pointer to that can be trivially converted
450 to void *. This happens as we fold (void *)(ptr p+ off). */
451 if (VOID_TYPE_P (ptd_type)
452 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
453 ptd_type = TREE_TYPE (TREE_TYPE (op0));
455 /* At which point we can try some of the same things as for indirects. */
456 t = maybe_fold_offset_to_array_ref (loc, op0, op1);
459 t = build_fold_addr_expr (t);
460 if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
462 SET_EXPR_LOCATION (t, loc);
468 /* Subroutine of fold_stmt. We perform several simplifications of the
469 memory reference tree EXPR and make sure to re-gimplify them properly
470 after propagation of constant addresses. IS_LHS is true if the
471 reference is supposed to be an lvalue.
    Recursive: after each successful local rewrite it re-folds EXPR.
    NOTE(review): declarations of T and some returns are elided in this
    view -- confirm against upstream.  */
474 maybe_fold_reference (tree expr, bool is_lhs)
/* RHS references to constant aggregates can fold to the constant.  */
480 && (result = fold_const_aggregate_ref (expr)))
483 /* ??? We might want to open-code the relevant remaining cases
484 to avoid using the generic fold. */
485 if (handled_component_p (*t)
486 && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
488 tree tem = fold (*t);
/* Walk down to the base object of the component reference chain.  */
493 while (handled_component_p (*t))
494 t = &TREE_OPERAND (*t, 0);
496 /* Fold back MEM_REFs to reference trees. */
497 if (TREE_CODE (*t) == MEM_REF
498 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
499 && integer_zerop (TREE_OPERAND (*t, 1))
500 && (TREE_THIS_VOLATILE (*t)
501 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
502 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
503 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
504 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
505 /* We have to look out here to not drop a required conversion
506 from the rhs to the lhs if is_lhs, but we don't have the
507 rhs here to verify that. Thus require strict type
509 && types_compatible_p (TREE_TYPE (*t),
510 TREE_TYPE (TREE_OPERAND
511 (TREE_OPERAND (*t, 0), 0))))
/* *&decl ==> decl; then retry folding the whole expression.  */
514 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
515 tem = maybe_fold_reference (expr, is_lhs);
520 /* Canonicalize MEM_REFs invariant address operand. */
521 else if (TREE_CODE (*t) == MEM_REF
522 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
523 && !DECL_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))
524 && !CONSTANT_CLASS_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
526 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
527 TREE_OPERAND (*t, 0),
528 TREE_OPERAND (*t, 1));
532 tem = maybe_fold_reference (expr, is_lhs);
/* TARGET_MEM_REFs get their own folder.  */
538 else if (TREE_CODE (*t) == TARGET_MEM_REF)
540 tree tem = maybe_fold_tmr (*t);
544 tem = maybe_fold_reference (expr, is_lhs);
/* Finally, try substituting a known-constant symbol value (RHS only;
   the is_lhs guard is in an elided line -- verify).  */
553 tree tem = get_symbol_constant_value (*t);
555 && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
557 *t = unshare_expr (tem);
558 tem = maybe_fold_reference (expr, is_lhs);
569 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
570 replacement rhs for the statement or NULL_TREE if no simplification
571 could be made. It is assumed that the operands have been previously
    folded.
    Dispatches on the RHS class (single / unary / binary / ternary).
    NOTE(review): this view is elided (several returns, case labels and
    braces are missing); verify against upstream gimple-fold.c.  */
575 fold_gimple_assign (gimple_stmt_iterator *si)
577 gimple stmt = gsi_stmt (*si);
578 enum tree_code subcode = gimple_assign_rhs_code (stmt);
579 location_t loc = gimple_location (stmt);
581 tree result = NULL_TREE;
583 switch (get_gimple_rhs_class (subcode))
585 case GIMPLE_SINGLE_RHS:
587 tree rhs = gimple_assign_rhs1 (stmt);
589 /* Try to fold a conditional expression. */
590 if (TREE_CODE (rhs) == COND_EXPR)
592 tree op0 = COND_EXPR_COND (rhs);
595 location_t cond_loc = EXPR_LOCATION (rhs);
597 if (COMPARISON_CLASS_P (op0))
/* Defer overflow warnings: only emit them if the folded condition
   is actually usable as a GIMPLE condition.  */
599 fold_defer_overflow_warnings ();
600 tem = fold_binary_loc (cond_loc,
601 TREE_CODE (op0), TREE_TYPE (op0),
602 TREE_OPERAND (op0, 0),
603 TREE_OPERAND (op0, 1));
604 /* This is actually a conditional expression, not a GIMPLE
605 conditional statement, however, the valid_gimple_rhs_p
606 test still applies. */
607 set = (tem && is_gimple_condexpr (tem)
608 && valid_gimple_rhs_p (tem));
609 fold_undefer_overflow_warnings (set, stmt, 0);
611 else if (is_gimple_min_invariant (op0))
620 result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
621 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
624 else if (REFERENCE_CLASS_P (rhs))
625 return maybe_fold_reference (rhs, false);
627 else if (TREE_CODE (rhs) == ADDR_EXPR)
629 tree ref = TREE_OPERAND (rhs, 0);
630 tree tem = maybe_fold_reference (ref, true);
/* &*p with zero offset folds to p itself.  */
632 && TREE_CODE (tem) == MEM_REF
633 && integer_zerop (TREE_OPERAND (tem, 1)))
634 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
636 result = fold_convert (TREE_TYPE (rhs),
637 build_fold_addr_expr_loc (loc, tem));
638 else if (TREE_CODE (ref) == MEM_REF
639 && integer_zerop (TREE_OPERAND (ref, 1)))
640 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
643 else if (TREE_CODE (rhs) == CONSTRUCTOR
644 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
645 && (CONSTRUCTOR_NELTS (rhs)
646 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
648 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
652 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
653 if (TREE_CODE (val) != INTEGER_CST
654 && TREE_CODE (val) != REAL_CST
655 && TREE_CODE (val) != FIXED_CST)
658 return build_vector_from_ctor (TREE_TYPE (rhs),
659 CONSTRUCTOR_ELTS (rhs));
662 else if (DECL_P (rhs))
663 return unshare_expr (get_symbol_constant_value (rhs));
665 /* If we couldn't fold the RHS, hand over to the generic
667 if (result == NULL_TREE)
670 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
671 that may have been added by fold, and "useless" type
672 conversions that might now be apparent due to propagation. */
673 STRIP_USELESS_TYPE_CONVERSION (result);
675 if (result != rhs && valid_gimple_rhs_p (result))
682 case GIMPLE_UNARY_RHS:
684 tree rhs = gimple_assign_rhs1 (stmt);
686 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
689 /* If the operation was a conversion do _not_ mark a
690 resulting constant with TREE_OVERFLOW if the original
691 constant was not. These conversions have implementation
692 defined behavior and retaining the TREE_OVERFLOW flag
693 here would confuse later passes such as VRP. */
694 if (CONVERT_EXPR_CODE_P (subcode)
695 && TREE_CODE (result) == INTEGER_CST
696 && TREE_CODE (rhs) == INTEGER_CST)
697 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
699 STRIP_USELESS_TYPE_CONVERSION (result);
700 if (valid_gimple_rhs_p (result))
/* Pointer-to-pointer conversions: try folding the address form.  */
703 else if (CONVERT_EXPR_CODE_P (subcode)
704 && POINTER_TYPE_P (gimple_expr_type (stmt))
705 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
707 tree type = gimple_expr_type (stmt);
708 tree t = maybe_fold_offset_to_address (loc,
709 gimple_assign_rhs1 (stmt),
710 integer_zero_node, type);
717 case GIMPLE_BINARY_RHS:
718 /* Try to fold pointer addition. */
719 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
721 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
722 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
724 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
725 if (!useless_type_conversion_p
726 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
727 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
729 result = maybe_fold_stmt_addition (gimple_location (stmt),
731 gimple_assign_rhs1 (stmt),
732 gimple_assign_rhs2 (stmt));
736 result = fold_binary_loc (loc, subcode,
737 TREE_TYPE (gimple_assign_lhs (stmt)),
738 gimple_assign_rhs1 (stmt),
739 gimple_assign_rhs2 (stmt));
743 STRIP_USELESS_TYPE_CONVERSION (result);
744 if (valid_gimple_rhs_p (result))
747 /* Fold might have produced non-GIMPLE, so if we trust it blindly
748 we lose canonicalization opportunities. Do not go again
749 through fold here though, or the same non-GIMPLE will be
751 if (commutative_tree_code (subcode)
752 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
753 gimple_assign_rhs2 (stmt), false))
754 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
755 gimple_assign_rhs2 (stmt),
756 gimple_assign_rhs1 (stmt));
760 case GIMPLE_TERNARY_RHS:
761 result = fold_ternary_loc (loc, subcode,
762 TREE_TYPE (gimple_assign_lhs (stmt)),
763 gimple_assign_rhs1 (stmt),
764 gimple_assign_rhs2 (stmt),
765 gimple_assign_rhs3 (stmt));
769 STRIP_USELESS_TYPE_CONVERSION (result);
770 if (valid_gimple_rhs_p (result))
773 /* Fold might have produced non-GIMPLE, so if we trust it blindly
774 we lose canonicalization opportunities. Do not go again
775 through fold here though, or the same non-GIMPLE will be
777 if (commutative_ternary_tree_code (subcode)
778 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
779 gimple_assign_rhs2 (stmt), false))
780 return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
781 gimple_assign_rhs2 (stmt),
782 gimple_assign_rhs1 (stmt),
783 gimple_assign_rhs3 (stmt));
787 case GIMPLE_INVALID_RHS:
794 /* Attempt to fold a conditional statement. Return true if any changes were
795 made. We only attempt to fold the condition expression, and do not perform
796 any transformation that would require alteration of the cfg. It is
797 assumed that the operands have been previously folded.
    NOTE(review): the "return false" path is elided in this view.  */
800 fold_gimple_cond (gimple stmt)
802 tree result = fold_binary_loc (gimple_location (stmt),
803 gimple_cond_code (stmt),
805 gimple_cond_lhs (stmt),
806 gimple_cond_rhs (stmt));
810 STRIP_USELESS_TYPE_CONVERSION (result);
/* Only install the folded form if it is a valid GIMPLE condition.  */
811 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
813 gimple_cond_set_condition_from_tree (stmt, result);
821 /* Convert EXPR into a GIMPLE value suitable for substitution on the
822 RHS of an assignment. Insert the necessary statements before
823 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
824 is replaced. If the call is expected to produces a result, then it
825 is replaced by an assignment of the new RHS to the result variable.
826 If the result is to be ignored, then the call is replaced by a
827 GIMPLE_NOP. A proper VDEF chain is retained by making the first
828 VUSE and the last VDEF of the whole sequence be the same as the replaced
829 statement and using new SSA names for stores in between.
    NOTE(review): braces and a few statements are elided in this view;
    verify against upstream before modifying the virtual-operand logic.  */
832 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
835 tree tmp = NULL_TREE; /* Silence warning. */
836 gimple stmt, new_stmt;
837 gimple_stmt_iterator i;
838 gimple_seq stmts = gimple_seq_alloc();
839 struct gimplify_ctx gctx;
841 gimple laststore = NULL;
844 stmt = gsi_stmt (*si_p);
846 gcc_assert (is_gimple_call (stmt));
848 lhs = gimple_call_lhs (stmt);
/* The first VUSE of the new sequence must match the replaced call's.  */
849 reaching_vuse = gimple_vuse (stmt);
851 push_gimplify_context (&gctx);
853 if (lhs == NULL_TREE)
854 gimplify_and_add (expr, &stmts);
856 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
858 pop_gimplify_context (NULL);
860 if (gimple_has_location (stmt))
861 annotate_all_with_location (stmts, gimple_location (stmt));
863 /* The replacement can expose previously unreferenced variables. */
864 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
868 gsi_insert_before (si_p, last, GSI_NEW_STMT);
871 new_stmt = gsi_stmt (i);
872 if (gimple_in_ssa_p (cfun))
874 find_new_referenced_vars (new_stmt);
875 mark_symbols_for_renaming (new_stmt);
877 /* If the new statement has a VUSE, update it with exact SSA name we
878 know will reach this one. */
879 if (gimple_vuse (new_stmt))
881 /* If we've also seen a previous store create a new VDEF for
882 the latter one, and make that the new reaching VUSE. */
885 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
886 gimple_set_vdef (laststore, reaching_vuse);
887 update_stmt (laststore);
890 gimple_set_vuse (new_stmt, reaching_vuse);
891 gimple_set_modified (new_stmt, true);
/* Remember the last store so its VDEF can be fixed up afterwards.  */
893 if (gimple_assign_single_p (new_stmt)
894 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
896 laststore = new_stmt;
901 if (lhs == NULL_TREE)
903 /* If we replace a call without LHS that has a VDEF and our new
904 sequence ends with a store we must make that store have the same
905 vdef in order not to break the sequencing. This can happen
906 for instance when folding memcpy calls into assignments. */
907 if (gimple_vdef (stmt) && laststore)
909 gimple_set_vdef (laststore, gimple_vdef (stmt));
910 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
911 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
912 update_stmt (laststore);
914 else if (gimple_in_ssa_p (cfun))
916 unlink_stmt_vdef (stmt);
925 gsi_insert_before (si_p, last, GSI_NEW_STMT);
928 if (laststore && is_gimple_reg (lhs))
930 gimple_set_vdef (laststore, gimple_vdef (stmt));
931 update_stmt (laststore);
932 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
933 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
938 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
939 gimple_set_vdef (laststore, reaching_vuse);
940 update_stmt (laststore);
/* Replace the original call by LHS = TMP, inheriting its virtual defs.  */
943 new_stmt = gimple_build_assign (lhs, tmp);
944 if (!is_gimple_reg (tmp))
945 gimple_set_vuse (new_stmt, reaching_vuse);
946 if (!is_gimple_reg (lhs))
948 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
949 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
950 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
954 gimple_set_location (new_stmt, gimple_location (stmt));
955 gsi_replace (si_p, new_stmt, false);
958 /* Return the string length, maximum string length or maximum value of
    ARG (the word "ARG" appears on an elided line upstream).
960 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
961 is not NULL and, for TYPE == 0, its value is not equal to the length
962 we determine or if we are unable to determine the length or value,
963 return false. VISITED is a bitmap of visited variables.
964 TYPE is 0 if string length should be returned, 1 for maximum string
965 length and 2 for maximum value ARG can have.
    NOTE(review): several branches and returns are elided in this view.  */
968 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
973 if (TREE_CODE (arg) != SSA_NAME)
/* A COND_EXPR is only known if both arms are known.  */
975 if (TREE_CODE (arg) == COND_EXPR)
976 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
977 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
978 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
979 else if (TREE_CODE (arg) == ADDR_EXPR
980 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
981 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
983 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
984 if (TREE_CODE (aop0) == INDIRECT_REF
985 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
986 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
987 length, visited, type);
/* Reject negative or non-constant values.  */
993 if (TREE_CODE (val) != INTEGER_CST
994 || tree_int_cst_sgn (val) < 0)
998 val = c_strlen (arg, 1);
/* Merge VAL with an already-recorded *LENGTH: take the max for
   TYPE != 0, require exact equality for TYPE == 0.  */
1006 if (TREE_CODE (*length) != INTEGER_CST
1007 || TREE_CODE (val) != INTEGER_CST)
1010 if (tree_int_cst_lt (*length, val))
1014 else if (simple_cst_equal (val, *length) != 1)
1022 /* If we were already here, break the infinite cycle. */
1023 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1027 def_stmt = SSA_NAME_DEF_STMT (var);
1029 switch (gimple_code (def_stmt))
1032 /* The RHS of the statement defining VAR must either have a
1033 constant length or come from another SSA_NAME with a constant
1035 if (gimple_assign_single_p (def_stmt)
1036 || gimple_assign_unary_nop_p (def_stmt))
1038 tree rhs = gimple_assign_rhs1 (def_stmt);
1039 return get_maxval_strlen (rhs, length, visited, type);
1045 /* All the arguments of the PHI node must have the same constant
1049 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1051 tree arg = gimple_phi_arg (def_stmt, i)->def;
1053 /* If this PHI has itself as an argument, we cannot
1054 determine the string length of this argument. However,
1055 if we can find a constant string length for the other
1056 PHI args then we can still be sure that this is a
1057 constant string length. So be optimistic and just
1058 continue with the next argument. */
1059 if (arg == gimple_phi_result (def_stmt))
1062 if (!get_maxval_strlen (arg, length, visited, type))
1074 /* Fold builtin call in statement STMT. Returns a simplified tree.
1075 We may return a non-constant expression, including another call
1076 to a different function and with different arguments, e.g.,
1077 substituting memcpy for strcpy when the string length is known.
1078 Note that some builtins expand into inline code that may not
1079 be valid in GIMPLE. Callers must take care.
     NOTE(review): several lines (ARG_IDX/TYPE setup per builtin, default
     cases, final return) are elided in this view; verify upstream.  */
1082 gimple_fold_builtin (gimple stmt)
1084 tree result, val[3];
1090 location_t loc = gimple_location (stmt);
1092 gcc_assert (is_gimple_call (stmt));
/* IGNORE is true when the call's result is unused.  */
1094 ignore = (gimple_call_lhs (stmt) == NULL);
1096 /* First try the generic builtin folder. If that succeeds, return the
1098 result = fold_call_stmt (stmt, ignore);
1102 STRIP_NOPS (result);
1106 /* Ignore MD builtins. */
1107 callee = gimple_call_fndecl (stmt);
1108 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1111 /* If the builtin could not be folded, and it has no argument list,
1113 nargs = gimple_call_num_args (stmt);
1117 /* Limit the work only for builtins we know how to simplify. */
1118 switch (DECL_FUNCTION_CODE (callee))
1120 case BUILT_IN_STRLEN:
1121 case BUILT_IN_FPUTS:
1122 case BUILT_IN_FPUTS_UNLOCKED:
1126 case BUILT_IN_STRCPY:
1127 case BUILT_IN_STRNCPY:
1131 case BUILT_IN_MEMCPY_CHK:
1132 case BUILT_IN_MEMPCPY_CHK:
1133 case BUILT_IN_MEMMOVE_CHK:
1134 case BUILT_IN_MEMSET_CHK:
1135 case BUILT_IN_STRNCPY_CHK:
1139 case BUILT_IN_STRCPY_CHK:
1140 case BUILT_IN_STPCPY_CHK:
1144 case BUILT_IN_SNPRINTF_CHK:
1145 case BUILT_IN_VSNPRINTF_CHK:
1153 if (arg_idx >= nargs)
1156 /* Try to use the dataflow information gathered by the CCP process. */
1157 visited = BITMAP_ALLOC (NULL);
1158 bitmap_clear (visited);
1160 memset (val, 0, sizeof (val));
1161 a = gimple_call_arg (stmt, arg_idx);
1162 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
1163 val[arg_idx] = NULL_TREE;
1165 BITMAP_FREE (visited);
/* Second dispatch: now that the relevant argument's length/value is
   known (or NULL_TREE), fold each supported builtin.  */
1168 switch (DECL_FUNCTION_CODE (callee))
1170 case BUILT_IN_STRLEN:
1171 if (val[0] && nargs == 1)
1174 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
1176 /* If the result is not a valid gimple value, or not a cast
1177 of a valid gimple value, then we cannot use the result. */
1178 if (is_gimple_val (new_val)
1179 || (is_gimple_cast (new_val)
1180 && is_gimple_val (TREE_OPERAND (new_val, 0))))
1185 case BUILT_IN_STRCPY:
1186 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
1187 result = fold_builtin_strcpy (loc, callee,
1188 gimple_call_arg (stmt, 0),
1189 gimple_call_arg (stmt, 1),
1193 case BUILT_IN_STRNCPY:
1194 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1195 result = fold_builtin_strncpy (loc, callee,
1196 gimple_call_arg (stmt, 0),
1197 gimple_call_arg (stmt, 1),
1198 gimple_call_arg (stmt, 2),
1202 case BUILT_IN_FPUTS:
1204 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1205 gimple_call_arg (stmt, 1),
1206 ignore, false, val[0]);
1209 case BUILT_IN_FPUTS_UNLOCKED:
1211 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1212 gimple_call_arg (stmt, 1),
1213 ignore, true, val[0]);
1216 case BUILT_IN_MEMCPY_CHK:
1217 case BUILT_IN_MEMPCPY_CHK:
1218 case BUILT_IN_MEMMOVE_CHK:
1219 case BUILT_IN_MEMSET_CHK:
1220 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1221 result = fold_builtin_memory_chk (loc, callee,
1222 gimple_call_arg (stmt, 0),
1223 gimple_call_arg (stmt, 1),
1224 gimple_call_arg (stmt, 2),
1225 gimple_call_arg (stmt, 3),
1227 DECL_FUNCTION_CODE (callee));
1230 case BUILT_IN_STRCPY_CHK:
1231 case BUILT_IN_STPCPY_CHK:
1232 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1233 result = fold_builtin_stxcpy_chk (loc, callee,
1234 gimple_call_arg (stmt, 0),
1235 gimple_call_arg (stmt, 1),
1236 gimple_call_arg (stmt, 2),
1238 DECL_FUNCTION_CODE (callee));
1241 case BUILT_IN_STRNCPY_CHK:
1242 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1243 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
1244 gimple_call_arg (stmt, 1),
1245 gimple_call_arg (stmt, 2),
1246 gimple_call_arg (stmt, 3),
1250 case BUILT_IN_SNPRINTF_CHK:
1251 case BUILT_IN_VSNPRINTF_CHK:
1252 if (val[1] && is_gimple_val (val[1]))
1253 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1254 DECL_FUNCTION_CODE (callee));
/* An unused result can be simplified away entirely.  */
1261 if (result && ignore)
1262 result = fold_ignored_result (result);
1266 /* Return the first of the base binfos of BINFO that has virtual functions. */
/* Scan the direct base binfos of BINFO and return the first one whose
   BINFO_VIRTUALS list is non-empty, i.e. the first base with virtual
   functions.  NOTE(review): the return statements are among the lines
   elided from this excerpt — presumably returns base_binfo on a hit and
   NULL_TREE otherwise; confirm against the full source.  */
1269 get_first_base_binfo_with_virtuals (tree binfo)
1274 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1275 if (BINFO_VIRTUALS (base_binfo))
1282 /* Search for a base binfo of BINFO that corresponds to TYPE and return it if
1283 it is found or NULL_TREE if it is not. */
/* Linear search over the direct base binfos of BINFO for the one whose
   TREE_TYPE is pointer-identical to TYPE.  RES accumulates the match;
   the lines that assign and return it are elided from this excerpt
   (per the comment above: returns the binfo found, or NULL_TREE).  */
1286 get_base_binfo_for_type (tree binfo, tree type)
1290 tree res = NULL_TREE;
1292 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1293 if (TREE_TYPE (base_binfo) == type)
1302 /* Return a binfo describing the part of object referenced by expression REF.
1303 Return NULL_TREE if it cannot be determined. REF can consist of a series of
1304 COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just
1305 a simple declaration, indirect reference or an SSA_NAME. If the function
1306 discovers an INDIRECT_REF or an SSA_NAME, it will assume that the
1307 encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
1308 Otherwise the first non-artificial field declaration or the base declaration
1309 will be examined to get the encapsulating type. */
/* See the block comment above: walk REF (a chain of COMPONENT_REFs over a
   decl, MEM_REF or SSA_NAME) and return the binfo describing the object
   part it refers to, or NULL_TREE.  */
1312 gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
/* Loop over COMPONENT_REF chain; lines forming the enclosing loop are
   partially elided in this excerpt.  */
1316 if (TREE_CODE (ref) == COMPONENT_REF)
1319 tree binfo, base_binfo;
1320 tree field = TREE_OPERAND (ref, 1);
/* A non-artificial field directly gives the encapsulating record type.  */
1322 if (!DECL_ARTIFICIAL (field))
1324 tree type = TREE_TYPE (field);
1325 if (TREE_CODE (type) == RECORD_TYPE)
1326 return TYPE_BINFO (type);
/* Artificial field: consult the binfo of the parent aggregate type.  */
1331 par_type = TREE_TYPE (TREE_OPERAND (ref, 0));
1332 binfo = TYPE_BINFO (par_type);
1334 || BINFO_N_BASE_BINFOS (binfo) == 0)
/* The field must correspond to the first base with virtuals, otherwise
   we cannot describe the referenced part.  */
1337 base_binfo = get_first_base_binfo_with_virtuals (binfo);
1338 if (base_binfo && BINFO_TYPE (base_binfo) != TREE_TYPE (field))
/* Recurse on the outer object, then select the base binfo matching the
   field's type (descendant binfo).  */
1342 d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0),
1344 /* Get descendant binfo. */
1347 return get_base_binfo_for_type (d_binfo, TREE_TYPE (field));
1350 ref = TREE_OPERAND (ref, 0);
/* Base cases: a declaration of record type, or an SSA_NAME/MEM_REF for
   which the caller supplied KNOWN_BINFO.  */
1352 else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE)
1353 return TYPE_BINFO (TREE_TYPE (ref));
1354 else if (known_binfo
1355 && (TREE_CODE (ref) == SSA_NAME
1356 || TREE_CODE (ref) == MEM_REF))
1363 /* Fold a OBJ_TYPE_REF expression to the address of a function. TOKEN is
1364 integer form of OBJ_TYPE_REF_TOKEN of the reference expression. KNOWN_BINFO
1365 carries the binfo describing the true type of OBJ_TYPE_REF_OBJECT(REF). */
/* Devirtualize: look up vtable slot TOKEN in KNOWN_BINFO's virtuals list
   and return the address of the target function, if safe.  */
1368 gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo)
1372 struct cgraph_node *node;
1374 v = BINFO_VIRTUALS (known_binfo);
/* Each vtable entry occupies TARGET_VTABLE_USES_DESCRIPTORS slots when
   descriptors are in use, one slot otherwise; the walk over V using this
   stride is partially elided from this excerpt.  */
1378 i += (TARGET_VTABLE_USES_DESCRIPTORS
1379 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
1383 fndecl = TREE_VALUE (v);
1384 node = cgraph_get_node (fndecl);
1385 /* When cgraph node is missing and function is not public, we cannot
1386 devirtualize. This can happen in WHOPR when the actual method
1387 ends up in other partition, because we found devirtualization
1388 possibility too late. */
1389 if ((!node || (!node->analyzed && !node->in_other_partition))
1390 && (!TREE_PUBLIC (fndecl) || DECL_COMDAT (fndecl)))
/* Success: return &fndecl for substitution into the call.  */
1392 return build_fold_addr_expr (fndecl);
1396 /* Fold a OBJ_TYPE_REF expression to the address of a function. If KNOWN_TYPE
1397 is not NULL_TREE, it is the true type of the outmost encapsulating object if
1398 that comes from a pointer SSA_NAME. If the true outmost encapsulating type
1399 can be determined from a declaration OBJ_TYPE_REF_OBJECT(REF), it is used
1400 regardless of KNOWN_TYPE (which thus can be NULL_TREE). */
/* See the block comment above: determine the binfo of the object REF is
   invoked on (preferring what the object expression itself proves over
   KNOWN_TYPE) and delegate to gimple_fold_obj_type_ref_known_binfo.  */
1403 gimple_fold_obj_type_ref (tree ref, tree known_type)
1405 tree obj = OBJ_TYPE_REF_OBJECT (ref);
1406 tree known_binfo = known_type ? TYPE_BINFO (known_type) : NULL_TREE;
/* Strip the address-of so we analyze the underlying object.  */
1409 if (TREE_CODE (obj) == ADDR_EXPR)
1410 obj = TREE_OPERAND (obj, 0);
1412 binfo = gimple_get_relevant_ref_binfo (obj, known_binfo);
/* TOKEN is the vtable index encoded in the OBJ_TYPE_REF.  */
1415 HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
1416 return gimple_fold_obj_type_ref_known_binfo (token, binfo);
1422 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1423 The statement may be replaced by another statement, e.g., if the call
1424 simplifies to a constant value. Return true if any changes were made.
1425 It is assumed that the operands have been previously folded. */
/* See the block comment above: fold the call at *GSI, either via builtin
   folding (which may replace the whole statement) or by devirtualizing an
   OBJ_TYPE_REF callee in place.  Returns true on change (return paths are
   elided from this excerpt).  */
1428 fold_gimple_call (gimple_stmt_iterator *gsi)
1430 gimple stmt = gsi_stmt (*gsi);
1432 tree callee = gimple_call_fndecl (stmt);
1434 /* Check for builtins that CCP can handle using information not
1435 available in the generic fold routines. */
1436 if (callee && DECL_BUILT_IN (callee))
1438 tree result = gimple_fold_builtin (stmt);
/* Prefer the cheap in-place update; fall back to re-gimplifying the
   folded tree when it is not a valid gimple RHS.  */
1442 if (!update_call_from_tree (gsi, result))
1443 gimplify_and_update_call_from_tree (gsi, result);
1449 /* ??? Should perhaps do this in fold proper. However, doing it
1450 there requires that we create a new CALL_EXPR, and that requires
1451 copying EH region info to the new node. Easier to just do it
1452 here where we can just smash the call operand. */
1453 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
1454 callee = gimple_call_fn (stmt);
1455 if (TREE_CODE (callee) == OBJ_TYPE_REF
1456 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR)
/* Try to devirtualize; on success smash the callee operand.  */
1460 t = gimple_fold_obj_type_ref (callee, NULL_TREE);
1463 gimple_call_set_fn (stmt, t);
1472 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1473 distinguishes both cases. */
/* Shared worker for fold_stmt and fold_stmt_inplace.  Dispatches on the
   statement code; the case labels and break statements of the switch are
   elided from this excerpt.  */
1476 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1478 bool changed = false;
1479 gimple stmt = gsi_stmt (*gsi);
1482 /* Fold the main computation performed by the statement. */
1483 switch (gimple_code (stmt))
/* GIMPLE_ASSIGN: fold the RHS; only install the result if it is type-
   compatible with the LHS and (when INPLACE) does not grow the statement.  */
1487 unsigned old_num_ops = gimple_num_ops (stmt);
1488 tree new_rhs = fold_gimple_assign (gsi);
1489 tree lhs = gimple_assign_lhs (stmt);
1491 && !useless_type_conversion_p (TREE_TYPE (lhs),
1492 TREE_TYPE (new_rhs)))
1493 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1496 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1498 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
/* GIMPLE_COND.  */
1505 changed |= fold_gimple_cond (stmt);
/* GIMPLE_CALL.  */
1509 /* Fold *& in call arguments. */
1510 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1511 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1513 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1516 gimple_call_set_arg (stmt, i, tmp);
/* Whole-statement call folding is only legal when not INPLACE.  */
1520 /* The entire statement may be replaced in this case. */
1522 changed |= fold_gimple_call (gsi);
/* GIMPLE_ASM: fold references in output (lvalue) and input operands.  */
1526 /* Fold *& in asm operands. */
1527 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1529 tree link = gimple_asm_output_op (stmt, i);
1530 tree op = TREE_VALUE (link);
1531 if (REFERENCE_CLASS_P (op)
1532 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1534 TREE_VALUE (link) = op;
1538 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1540 tree link = gimple_asm_input_op (stmt, i);
1541 tree op = TREE_VALUE (link);
1542 if (REFERENCE_CLASS_P (op)
1543 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
1545 TREE_VALUE (link) = op;
/* GIMPLE_DEBUG: fold references in the bound value.  */
1552 if (gimple_debug_bind_p (stmt))
1554 tree val = gimple_debug_bind_get_value (stmt);
1556 && REFERENCE_CLASS_P (val))
1558 tree tem = maybe_fold_reference (val, false);
1561 gimple_debug_bind_set_value (stmt, tem);
/* Re-fetch: the statement may have been replaced above.  */
1571 stmt = gsi_stmt (*gsi);
1573 /* Fold *& on the lhs. */
1574 if (gimple_has_lhs (stmt))
1576 tree lhs = gimple_get_lhs (stmt);
1577 if (lhs && REFERENCE_CLASS_P (lhs))
1579 tree new_lhs = maybe_fold_reference (lhs, true);
1582 gimple_set_lhs (stmt, new_lhs);
1591 /* Fold the statement pointed to by GSI. In some cases, this function may
1592 replace the whole statement with a new one. Returns true iff folding
1594 The statement pointed to by GSI should be in valid gimple form but may
1595 be in unfolded state as resulting from for example constant propagation
1596 which can produce *&x = 0. */
/* Public entry point: full (possibly statement-replacing) folding.  */
1599 fold_stmt (gimple_stmt_iterator *gsi)
1601 return fold_stmt_1 (gsi, false);
1604 /* Perform the minimal folding on statement STMT. Only operations like
1605 *&x created by constant propagation are handled. The statement cannot
1606 be replaced with a new one. Return true if the statement was
1607 changed, false otherwise.
1608 The statement STMT should be in valid gimple form but may
1609 be in unfolded state as resulting from for example constant propagation
1610 which can produce *&x = 0. */
/* Public entry point: minimal in-place folding.  The assert guarantees
   the INPLACE path never swapped the statement out from under us.  */
1613 fold_stmt_inplace (gimple stmt)
1615 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1616 bool changed = fold_stmt_1 (&gsi, true);
1617 gcc_assert (gsi_stmt (gsi) == stmt);
1621 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1622 if EXPR is null or we don't know how.
1623 If non-null, the result always has boolean type. */
/* See the block comment above.  Two symmetric arms: the INVERT arm maps
   EXPR to its logical negation, the plain arm to its boolean-typed
   equivalent; the branch selecting between them is elided here.  */
1626 canonicalize_bool (tree expr, bool invert)
/* Inverting arm: constants flip, an SSA name becomes (expr == 0), and a
   comparison gets its tree code inverted.  */
1632 if (integer_nonzerop (expr))
1633 return boolean_false_node;
1634 else if (integer_zerop (expr))
1635 return boolean_true_node;
1636 else if (TREE_CODE (expr) == SSA_NAME)
1637 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1638 build_int_cst (TREE_TYPE (expr), 0))
1639 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1640 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1642 TREE_OPERAND (expr, 0),
1643 TREE_OPERAND (expr, 1));
/* Non-inverting arm: already-boolean expressions pass through; an SSA
   name becomes (expr != 0); a comparison is rebuilt with boolean type.  */
1649 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1651 if (integer_nonzerop (expr))
1652 return boolean_true_node;
1653 else if (integer_zerop (expr))
1654 return boolean_false_node;
1655 else if (TREE_CODE (expr) == SSA_NAME)
1656 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1657 build_int_cst (TREE_TYPE (expr), 0));
1658 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1659 return fold_build2 (TREE_CODE (expr),
1661 TREE_OPERAND (expr, 0),
1662 TREE_OPERAND (expr, 1));
1668 /* Check to see if a boolean expression EXPR is logically equivalent to the
1669 comparison (OP1 CODE OP2). Check for various identities involving
/* See the block comment above: return true iff boolean expression EXPR is
   logically equivalent to the comparison (OP1 CODE OP2), applying SSA
   definition lookups and recursion on (name != 0)/(name == 0) forms.  */
1673 same_bool_comparison_p (const_tree expr, enum tree_code code,
1674 const_tree op1, const_tree op2)
1678 /* The obvious case. */
1679 if (TREE_CODE (expr) == code
1680 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1681 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1684 /* Check for comparing (name, name != 0) and the case where expr
1685 is an SSA_NAME with a definition matching the comparison. */
1686 if (TREE_CODE (expr) == SSA_NAME
1687 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
/* A boolean name equals (name != 0) and (name == nonzero) trivially.  */
1689 if (operand_equal_p (expr, op1, 0))
1690 return ((code == NE_EXPR && integer_zerop (op2))
1691 || (code == EQ_EXPR && integer_nonzerop (op2)));
/* Otherwise compare against EXPR's defining assignment.  */
1692 s = SSA_NAME_DEF_STMT (expr);
1693 if (is_gimple_assign (s)
1694 && gimple_assign_rhs_code (s) == code
1695 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1696 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1700 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1701 of name is a comparison, recurse. */
1702 if (TREE_CODE (op1) == SSA_NAME
1703 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1705 s = SSA_NAME_DEF_STMT (op1);
1706 if (is_gimple_assign (s)
1707 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1709 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) / (name == nonzero) preserve the sense of C ...  */
1710 if ((c == NE_EXPR && integer_zerop (op2))
1711 || (c == EQ_EXPR && integer_nonzerop (op2)))
1712 return same_bool_comparison_p (expr, c,
1713 gimple_assign_rhs1 (s),
1714 gimple_assign_rhs2 (s));
/* ... while (name == 0) / (name != nonzero) invert it.  */
1715 if ((c == EQ_EXPR && integer_zerop (op2))
1716 || (c == NE_EXPR && integer_nonzerop (op2)))
1717 return same_bool_comparison_p (expr,
1718 invert_tree_comparison (c, false),
1719 gimple_assign_rhs1 (s),
1720 gimple_assign_rhs2 (s));
1726 /* Check to see if two boolean expressions OP1 and OP2 are logically
/* Return true iff boolean expressions OP1 and OP2 are provably logically
   equivalent, via operand_equal_p or same_bool_comparison_p in either
   direction.  */
1730 same_bool_result_p (const_tree op1, const_tree op2)
1732 /* Simple cases first. */
1733 if (operand_equal_p (op1, op2, 0))
1736 /* Check the cases where at least one of the operands is a comparison.
1737 These are a bit smarter than operand_equal_p in that they apply some
1738 identifies on SSA_NAMEs. */
1739 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1740 && same_bool_comparison_p (op1, TREE_CODE (op2),
1741 TREE_OPERAND (op2, 0),
1742 TREE_OPERAND (op2, 1)))
1744 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1745 && same_bool_comparison_p (op2, TREE_CODE (op1),
1746 TREE_OPERAND (op1, 0),
1747 TREE_OPERAND (op1, 1)))
1754 /* Forward declarations for some mutually recursive functions. */
1757 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1758 enum tree_code code2, tree op2a, tree op2b);
1760 and_var_with_comparison (tree var, bool invert,
1761 enum tree_code code2, tree op2a, tree op2b);
1763 and_var_with_comparison_1 (gimple stmt,
1764 enum tree_code code2, tree op2a, tree op2b);
1766 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1767 enum tree_code code2, tree op2a, tree op2b);
1769 or_var_with_comparison (tree var, bool invert,
1770 enum tree_code code2, tree op2a, tree op2b);
1772 or_var_with_comparison_1 (gimple stmt,
1773 enum tree_code code2, tree op2a, tree op2b);
1775 /* Helper function for and_comparisons_1: try to simplify the AND of the
1776 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1777 If INVERT is true, invert the value of the VAR before doing the AND.
1778 Return NULL_EXPR if we can't simplify this to a single expression. */
/* See the block comment above: AND the ssa name VAR (negated if INVERT)
   with (OP2A CODE2 OP2B), returning a single expression or NULL_TREE.  */
1781 and_var_with_comparison (tree var, bool invert,
1782 enum tree_code code2, tree op2a, tree op2b)
1785 gimple stmt = SSA_NAME_DEF_STMT (var);
1787 /* We can only deal with variables whose definitions are assignments. */
1788 if (!is_gimple_assign (stmt))
1791 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1792 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1793 Then we only have to consider the simpler non-inverted cases. */
1795 t = or_var_with_comparison_1 (stmt,
1796 invert_tree_comparison (code2, false),
1799 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool re-applies the inversion and forces boolean type.  */
1800 return canonicalize_bool (t, invert);
1803 /* Try to simplify the AND of the ssa variable defined by the assignment
1804 STMT with the comparison specified by (OP2A CODE2 OP2B).
1805 Return NULL_EXPR if we can't simplify this to a single expression. */
/* See the block comment above: AND the value defined by assignment STMT
   with (OP2A CODE2 OP2B).  Several returns/braces are elided from this
   excerpt.  */
1808 and_var_with_comparison_1 (gimple stmt,
1809 enum tree_code code2, tree op2a, tree op2b)
1811 tree var = gimple_assign_lhs (stmt);
1812 tree true_test_var = NULL_TREE;
1813 tree false_test_var = NULL_TREE;
1814 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1816 /* Check for identities like (var AND (var == 0)) => false. */
1817 if (TREE_CODE (op2a) == SSA_NAME
1818 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) / (op2a == true) tests op2a for truth ...  */
1820 if ((code2 == NE_EXPR && integer_zerop (op2b))
1821 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1823 true_test_var = op2a;
1824 if (var == true_test_var)
/* ... and (op2a == 0) / (op2a != true) tests it for falsehood.  */
1827 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1828 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1830 false_test_var = op2a;
1831 if (var == false_test_var)
1832 return boolean_false_node;
1836 /* If the definition is a comparison, recurse on it. */
1837 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1839 tree t = and_comparisons_1 (innercode,
1840 gimple_assign_rhs1 (stmt),
1841 gimple_assign_rhs2 (stmt),
1849 /* If the definition is an AND or OR expression, we may be able to
1850 simplify by reassociating. */
1851 if (innercode == TRUTH_AND_EXPR
1852 || innercode == TRUTH_OR_EXPR
1853 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1854 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
1856 tree inner1 = gimple_assign_rhs1 (stmt);
1857 tree inner2 = gimple_assign_rhs2 (stmt);
1860 tree partial = NULL_TREE;
1861 bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);
1863 /* Check for boolean identities that don't require recursive examination
1865 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1866 inner1 AND (inner1 OR inner2) => inner1
1867 !inner1 AND (inner1 AND inner2) => false
1868 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1869 Likewise for similar cases involving inner2. */
1870 if (inner1 == true_test_var)
1871 return (is_and ? var : inner1);
1872 else if (inner2 == true_test_var)
1873 return (is_and ? var : inner2);
1874 else if (inner1 == false_test_var)
1876 ? boolean_false_node
1877 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1878 else if (inner2 == false_test_var)
1880 ? boolean_false_node
1881 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1883 /* Next, redistribute/reassociate the AND across the inner tests.
1884 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1885 if (TREE_CODE (inner1) == SSA_NAME
1886 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1887 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1888 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1889 gimple_assign_rhs1 (s),
1890 gimple_assign_rhs2 (s),
1891 code2, op2a, op2b)))
1893 /* Handle the AND case, where we are reassociating:
1894 (inner1 AND inner2) AND (op2a code2 op2b)
1896 If the partial result t is a constant, we win. Otherwise
1897 continue on to try reassociating with the other inner test. */
1900 if (integer_onep (t))
1902 else if (integer_zerop (t))
1903 return boolean_false_node;
1906 /* Handle the OR case, where we are redistributing:
1907 (inner1 OR inner2) AND (op2a code2 op2b)
1908 => (t OR (inner2 AND (op2a code2 op2b))) */
1911 if (integer_onep (t))
1912 return boolean_true_node;
1914 /* Save partial result for later. */
1919 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1920 if (TREE_CODE (inner2) == SSA_NAME
1921 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1922 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1923 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1924 gimple_assign_rhs1 (s),
1925 gimple_assign_rhs2 (s),
1926 code2, op2a, op2b)))
1928 /* Handle the AND case, where we are reassociating:
1929 (inner1 AND inner2) AND (op2a code2 op2b)
1930 => (inner1 AND t) */
1933 if (integer_onep (t))
1935 else if (integer_zerop (t))
1936 return boolean_false_node;
1939 /* Handle the OR case. where we are redistributing:
1940 (inner1 OR inner2) AND (op2a code2 op2b)
1941 => (t OR (inner1 AND (op2a code2 op2b)))
1942 => (t OR partial) */
1945 if (integer_onep (t))
1946 return boolean_true_node;
1949 /* We already got a simplification for the other
1950 operand to the redistributed OR expression. The
1951 interesting case is when at least one is false.
1952 Or, if both are the same, we can apply the identity
1954 if (integer_zerop (partial))
1956 else if (integer_zerop (t))
1958 else if (same_bool_result_p (t, partial))
1967 /* Try to simplify the AND of two comparisons defined by
1968 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1969 If this can be done without constructing an intermediate value,
1970 return the resulting tree; otherwise NULL_TREE is returned.
1971 This function is deliberately asymmetric as it recurses on SSA_DEFs
1972 in the first comparison but not the second. */
/* See the block comment above: simplify (OP1A CODE1 OP1B) AND
   (OP2A CODE2 OP2B) to a single expression or return NULL_TREE.
   Case labels, braces and some returns are elided from this excerpt.  */
1975 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1976 enum tree_code code2, tree op2a, tree op2b)
1978 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1979 if (operand_equal_p (op1a, op2a, 0)
1980 && operand_equal_p (op1b, op2b, 0))
1982 tree t = combine_comparisons (UNKNOWN_LOCATION,
1983 TRUTH_ANDIF_EXPR, code1, code2,
1984 boolean_type_node, op1a, op1b);
1989 /* Likewise the swapped case of the above. */
1990 if (operand_equal_p (op1a, op2b, 0)
1991 && operand_equal_p (op1b, op2a, 0))
1993 tree t = combine_comparisons (UNKNOWN_LOCATION,
1994 TRUTH_ANDIF_EXPR, code1,
1995 swap_tree_comparison (code2),
1996 boolean_type_node, op1a, op1b);
2001 /* If both comparisons are of the same value against constants, we might
2002 be able to merge them. */
2003 if (operand_equal_p (op1a, op2a, 0)
2004 && TREE_CODE (op1b) == INTEGER_CST
2005 && TREE_CODE (op2b) == INTEGER_CST)
2007 int cmp = tree_int_cst_compare (op1b, op2b);
2009 /* If we have (op1a == op1b), we should either be able to
2010 return that or FALSE, depending on whether the constant op1b
2011 also satisfies the other comparison against op2b. */
2012 if (code1 == EQ_EXPR)
/* VAL is whether the constant op1b satisfies (x CODE2 op2b).  */
2018 case EQ_EXPR: val = (cmp == 0); break;
2019 case NE_EXPR: val = (cmp != 0); break;
2020 case LT_EXPR: val = (cmp < 0); break;
2021 case GT_EXPR: val = (cmp > 0); break;
2022 case LE_EXPR: val = (cmp <= 0); break;
2023 case GE_EXPR: val = (cmp >= 0); break;
2024 default: done = false;
2029 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2031 return boolean_false_node;
2034 /* Likewise if the second comparison is an == comparison. */
2035 else if (code2 == EQ_EXPR)
/* Note the comparison directions flip: cmp orders op1b vs op2b.  */
2041 case EQ_EXPR: val = (cmp == 0); break;
2042 case NE_EXPR: val = (cmp != 0); break;
2043 case LT_EXPR: val = (cmp > 0); break;
2044 case GT_EXPR: val = (cmp < 0); break;
2045 case LE_EXPR: val = (cmp >= 0); break;
2046 case GE_EXPR: val = (cmp <= 0); break;
2047 default: done = false;
2052 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2054 return boolean_false_node;
2058 /* Same business with inequality tests. */
2059 else if (code1 == NE_EXPR)
2064 case EQ_EXPR: val = (cmp != 0); break;
2065 case NE_EXPR: val = (cmp == 0); break;
2066 case LT_EXPR: val = (cmp >= 0); break;
2067 case GT_EXPR: val = (cmp <= 0); break;
2068 case LE_EXPR: val = (cmp > 0); break;
2069 case GE_EXPR: val = (cmp < 0); break;
2074 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2076 else if (code2 == NE_EXPR)
2081 case EQ_EXPR: val = (cmp == 0); break;
2082 case NE_EXPR: val = (cmp != 0); break;
2083 case LT_EXPR: val = (cmp <= 0); break;
2084 case GT_EXPR: val = (cmp >= 0); break;
2085 case LE_EXPR: val = (cmp < 0); break;
2086 case GE_EXPR: val = (cmp > 0); break;
2091 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2094 /* Chose the more restrictive of two < or <= comparisons. */
2095 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2096 && (code2 == LT_EXPR || code2 == LE_EXPR))
2098 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2099 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2101 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2104 /* Likewise chose the more restrictive of two > or >= comparisons. */
2105 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2106 && (code2 == GT_EXPR || code2 == GE_EXPR))
2108 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2109 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2111 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2114 /* Check for singleton ranges. */
2116 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2117 || (code1 == GE_EXPR && code2 == LE_EXPR))
2118 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2120 /* Check for disjoint ranges. */
2122 && (code1 == LT_EXPR || code1 == LE_EXPR)
2123 && (code2 == GT_EXPR || code2 == GE_EXPR))
2124 return boolean_false_node;
2126 && (code1 == GT_EXPR || code1 == GE_EXPR)
2127 && (code2 == LT_EXPR || code2 == LE_EXPR))
2128 return boolean_false_node;
2131 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2132 NAME's definition is a truth value. See if there are any simplifications
2133 that can be done against the NAME's definition. */
2134 if (TREE_CODE (op1a) == SSA_NAME
2135 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2136 && (integer_zerop (op1b) || integer_onep (op1b)))
2138 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2139 || (code1 == NE_EXPR && integer_onep (op1b)));
2140 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2141 switch (gimple_code (stmt))
2144 /* Try to simplify by copy-propagating the definition. */
2145 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2148 /* If every argument to the PHI produces the same result when
2149 ANDed with the second comparison, we win.
2150 Do not do this unless the type is bool since we need a bool
2151 result here anyway. */
2152 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2154 tree result = NULL_TREE;
2156 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2158 tree arg = gimple_phi_arg_def (stmt, i);
2160 /* If this PHI has itself as an argument, ignore it.
2161 If all the other args produce the same result,
2163 if (arg == gimple_phi_result (stmt))
2165 else if (TREE_CODE (arg) == INTEGER_CST)
2167 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2170 result = boolean_false_node;
2171 else if (!integer_zerop (result))
2175 result = fold_build2 (code2, boolean_type_node,
2177 else if (!same_bool_comparison_p (result,
2181 else if (TREE_CODE (arg) == SSA_NAME)
2183 tree temp = and_var_with_comparison (arg, invert,
2189 else if (!same_bool_result_p (result, temp))
2205 /* Try to simplify the AND of two comparisons, specified by
2206 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2207 If this can be simplified to a single expression (without requiring
2208 introducing more SSA variables to hold intermediate values),
2209 return the resulting tree. Otherwise return NULL_TREE.
2210 If the result expression is non-null, it has boolean type. */
/* Public wrapper: try the asymmetric worker in both argument orders,
   since and_comparisons_1 only recurses on SSA defs of its first
   comparison.  */
2213 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2214 enum tree_code code2, tree op2a, tree op2b)
2216 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2220 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2223 /* Helper function for or_comparisons_1: try to simplify the OR of the
2224 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2225 If INVERT is true, invert the value of VAR before doing the OR.
2226 Return NULL_EXPR if we can't simplify this to a single expression. */
/* See the block comment above: OR the ssa name VAR (negated if INVERT)
   with (OP2A CODE2 OP2B), returning a single expression or NULL_TREE.
   Dual of and_var_with_comparison.  */
2229 or_var_with_comparison (tree var, bool invert,
2230 enum tree_code code2, tree op2a, tree op2b)
2233 gimple stmt = SSA_NAME_DEF_STMT (var);
2235 /* We can only deal with variables whose definitions are assignments. */
2236 if (!is_gimple_assign (stmt))
2239 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2240 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2241 Then we only have to consider the simpler non-inverted cases. */
2243 t = and_var_with_comparison_1 (stmt,
2244 invert_tree_comparison (code2, false),
2247 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool re-applies the inversion and forces boolean type.  */
2248 return canonicalize_bool (t, invert);
2251 /* Try to simplify the OR of the ssa variable defined by the assignment
2252 STMT with the comparison specified by (OP2A CODE2 OP2B).
2253 Return NULL_EXPR if we can't simplify this to a single expression. */
/* See the block comment above: OR the value defined by assignment STMT
   with (OP2A CODE2 OP2B).  Dual of and_var_with_comparison_1; several
   returns/braces are elided from this excerpt.  */
2256 or_var_with_comparison_1 (gimple stmt,
2257 enum tree_code code2, tree op2a, tree op2b)
2259 tree var = gimple_assign_lhs (stmt);
2260 tree true_test_var = NULL_TREE;
2261 tree false_test_var = NULL_TREE;
2262 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2264 /* Check for identities like (var OR (var != 0)) => true . */
2265 if (TREE_CODE (op2a) == SSA_NAME
2266 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) / (op2a == true) tests op2a for truth ...  */
2268 if ((code2 == NE_EXPR && integer_zerop (op2b))
2269 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2271 true_test_var = op2a;
2272 if (var == true_test_var)
/* ... and (op2a == 0) / (op2a != true) tests it for falsehood.  */
2275 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2276 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2278 false_test_var = op2a;
2279 if (var == false_test_var)
2280 return boolean_true_node;
2284 /* If the definition is a comparison, recurse on it. */
2285 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2287 tree t = or_comparisons_1 (innercode,
2288 gimple_assign_rhs1 (stmt),
2289 gimple_assign_rhs2 (stmt),
2297 /* If the definition is an AND or OR expression, we may be able to
2298 simplify by reassociating. */
2299 if (innercode == TRUTH_AND_EXPR
2300 || innercode == TRUTH_OR_EXPR
2301 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2302 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2304 tree inner1 = gimple_assign_rhs1 (stmt);
2305 tree inner2 = gimple_assign_rhs2 (stmt);
2308 tree partial = NULL_TREE;
2309 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2311 /* Check for boolean identities that don't require recursive examination
2313 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2314 inner1 OR (inner1 AND inner2) => inner1
2315 !inner1 OR (inner1 OR inner2) => true
2316 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2318 if (inner1 == true_test_var)
2319 return (is_or ? var : inner1);
2320 else if (inner2 == true_test_var)
2321 return (is_or ? var : inner2);
2322 else if (inner1 == false_test_var)
2325 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2326 else if (inner2 == false_test_var)
2329 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2331 /* Next, redistribute/reassociate the OR across the inner tests.
2332 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2333 if (TREE_CODE (inner1) == SSA_NAME
2334 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2335 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2336 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2337 gimple_assign_rhs1 (s),
2338 gimple_assign_rhs2 (s),
2339 code2, op2a, op2b)))
2341 /* Handle the OR case, where we are reassociating:
2342 (inner1 OR inner2) OR (op2a code2 op2b)
2344 If the partial result t is a constant, we win. Otherwise
2345 continue on to try reassociating with the other inner test. */
2346 if (innercode == TRUTH_OR_EXPR)
2348 if (integer_onep (t))
2349 return boolean_true_node;
2350 else if (integer_zerop (t))
2354 /* Handle the AND case, where we are redistributing:
2355 (inner1 AND inner2) OR (op2a code2 op2b)
2356 => (t AND (inner2 OR (op2a code op2b))) */
2359 if (integer_zerop (t))
2360 return boolean_false_node;
2362 /* Save partial result for later. */
2367 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2368 if (TREE_CODE (inner2) == SSA_NAME
2369 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2370 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2371 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2372 gimple_assign_rhs1 (s),
2373 gimple_assign_rhs2 (s),
2374 code2, op2a, op2b)))
2376 /* Handle the OR case, where we are reassociating:
2377 (inner1 OR inner2) OR (op2a code2 op2b)
2379 if (innercode == TRUTH_OR_EXPR)
2381 if (integer_zerop (t))
2383 else if (integer_onep (t))
2384 return boolean_true_node;
2387 /* Handle the AND case, where we are redistributing:
2388 (inner1 AND inner2) OR (op2a code2 op2b)
2389 => (t AND (inner1 OR (op2a code2 op2b)))
2390 => (t AND partial) */
2393 if (integer_zerop (t))
2394 return boolean_false_node;
2397 /* We already got a simplification for the other
2398 operand to the redistributed AND expression. The
2399 interesting case is when at least one is true.
2400 Or, if both are the same, we can apply the identity
2401 (x AND x) == true. */
2402 if (integer_onep (partial))
2404 else if (integer_onep (t))
2406 else if (same_bool_result_p (t, partial))
2407 return boolean_true_node;
2415 /* Try to simplify the OR of two comparisons defined by
2416 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2417 If this can be done without constructing an intermediate value,
2418 return the resulting tree; otherwise NULL_TREE is returned.
2419 This function is deliberately asymmetric as it recurses on SSA_DEFs
2420 in the first comparison but not the second. */
2423 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2424 enum tree_code code2, tree op2a, tree op2b)
2426 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2427 if (operand_equal_p (op1a, op2a, 0)
2428 && operand_equal_p (op1b, op2b, 0))
2430 tree t = combine_comparisons (UNKNOWN_LOCATION,
2431 TRUTH_ORIF_EXPR, code1, code2,
2432 boolean_type_node, op1a, op1b);
2437 /* Likewise the swapped case of the above. */
2438 if (operand_equal_p (op1a, op2b, 0)
2439 && operand_equal_p (op1b, op2a, 0))
2441 tree t = combine_comparisons (UNKNOWN_LOCATION,
2442 TRUTH_ORIF_EXPR, code1,
2443 swap_tree_comparison (code2),
2444 boolean_type_node, op1a, op1b);
2449 /* If both comparisons are of the same value against constants, we might
2450 be able to merge them. */
2451 if (operand_equal_p (op1a, op2a, 0)
2452 && TREE_CODE (op1b) == INTEGER_CST
2453 && TREE_CODE (op2b) == INTEGER_CST)
2455 int cmp = tree_int_cst_compare (op1b, op2b);
2457 /* If we have (op1a != op1b), we should either be able to
2458 return that or TRUE, depending on whether the constant op1b
2459 also satisfies the other comparison against op2b. */
2460 if (code1 == NE_EXPR)
2466 case EQ_EXPR: val = (cmp == 0); break;
2467 case NE_EXPR: val = (cmp != 0); break;
2468 case LT_EXPR: val = (cmp < 0); break;
2469 case GT_EXPR: val = (cmp > 0); break;
2470 case LE_EXPR: val = (cmp <= 0); break;
2471 case GE_EXPR: val = (cmp >= 0); break;
2472 default: done = false;
2477 return boolean_true_node;
2479 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2482 /* Likewise if the second comparison is a != comparison. */
2483 else if (code2 == NE_EXPR)
2489 case EQ_EXPR: val = (cmp == 0); break;
2490 case NE_EXPR: val = (cmp != 0); break;
2491 case LT_EXPR: val = (cmp > 0); break;
2492 case GT_EXPR: val = (cmp < 0); break;
2493 case LE_EXPR: val = (cmp >= 0); break;
2494 case GE_EXPR: val = (cmp <= 0); break;
2495 default: done = false;
2500 return boolean_true_node;
2502 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2506 /* See if an equality test is redundant with the other comparison. */
2507 else if (code1 == EQ_EXPR)
2512 case EQ_EXPR: val = (cmp == 0); break;
2513 case NE_EXPR: val = (cmp != 0); break;
2514 case LT_EXPR: val = (cmp < 0); break;
2515 case GT_EXPR: val = (cmp > 0); break;
2516 case LE_EXPR: val = (cmp <= 0); break;
2517 case GE_EXPR: val = (cmp >= 0); break;
2522 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2524 else if (code2 == EQ_EXPR)
2529 case EQ_EXPR: val = (cmp == 0); break;
2530 case NE_EXPR: val = (cmp != 0); break;
2531 case LT_EXPR: val = (cmp > 0); break;
2532 case GT_EXPR: val = (cmp < 0); break;
2533 case LE_EXPR: val = (cmp >= 0); break;
2534 case GE_EXPR: val = (cmp <= 0); break;
2539 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2542 /* Chose the less restrictive of two < or <= comparisons. */
2543 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2544 && (code2 == LT_EXPR || code2 == LE_EXPR))
2546 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2547 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2549 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2552 /* Likewise chose the less restrictive of two > or >= comparisons. */
2553 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2554 && (code2 == GT_EXPR || code2 == GE_EXPR))
2556 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2557 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2559 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2562 /* Check for singleton ranges. */
2564 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2565 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2566 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2568 /* Check for less/greater pairs that don't restrict the range at all. */
2570 && (code1 == LT_EXPR || code1 == LE_EXPR)
2571 && (code2 == GT_EXPR || code2 == GE_EXPR))
2572 return boolean_true_node;
2574 && (code1 == GT_EXPR || code1 == GE_EXPR)
2575 && (code2 == LT_EXPR || code2 == LE_EXPR))
2576 return boolean_true_node;
2579 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2580 NAME's definition is a truth value. See if there are any simplifications
2581 that can be done against the NAME's definition. */
2582 if (TREE_CODE (op1a) == SSA_NAME
2583 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2584 && (integer_zerop (op1b) || integer_onep (op1b)))
2586 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2587 || (code1 == NE_EXPR && integer_onep (op1b)));
2588 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2589 switch (gimple_code (stmt))
2592 /* Try to simplify by copy-propagating the definition. */
2593 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2596 /* If every argument to the PHI produces the same result when
2597 ORed with the second comparison, we win.
2598 Do not do this unless the type is bool since we need a bool
2599 result here anyway. */
2600 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2602 tree result = NULL_TREE;
2604 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2606 tree arg = gimple_phi_arg_def (stmt, i);
2608 /* If this PHI has itself as an argument, ignore it.
2609 If all the other args produce the same result,
2611 if (arg == gimple_phi_result (stmt))
2613 else if (TREE_CODE (arg) == INTEGER_CST)
2615 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2618 result = boolean_true_node;
2619 else if (!integer_onep (result))
2623 result = fold_build2 (code2, boolean_type_node,
2625 else if (!same_bool_comparison_p (result,
2629 else if (TREE_CODE (arg) == SSA_NAME)
2631 tree temp = or_var_with_comparison (arg, invert,
2637 else if (!same_bool_result_p (result, temp))
2653 /* Try to simplify the OR of two comparisons, specified by
2654    (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2655 If this can be simplified to a single expression (without requiring
2656 introducing more SSA variables to hold intermediate values),
2657 return the resulting tree. Otherwise return NULL_TREE.
2658 If the result expression is non-null, it has boolean type. */
2661 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2662 enum tree_code code2, tree op2a, tree op2b)
2664 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2668 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);