1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
35 /* If SYM is a constant variable with known value, return the value.
36 NULL_TREE is returned otherwise. */
/* NOTE(review): this chunk is elided -- the return type line, braces and
   several statements are not visible here; comments below only describe
   what the visible lines establish.  */
39 get_symbol_constant_value (tree sym)
42 && (TREE_READONLY (sym)
43 || TREE_CODE (sym) == CONST_DECL))
45 tree val = DECL_INITIAL (sym);
/* Only a valid GIMPLE minimal invariant may be used as a known value.  */
49 if (is_gimple_min_invariant (val))
51 if (TREE_CODE (val) == ADDR_EXPR)
53 tree base = get_base_address (TREE_OPERAND (val, 0));
54 if (base && TREE_CODE (base) == VAR_DECL)
/* The initializer takes the variable's address: mark it addressable
   and register it when a referenced-vars map exists for CFUN.  */
56 TREE_ADDRESSABLE (base) = 1;
57 if (gimple_referenced_vars (cfun))
58 add_referenced_var (base);
64 /* Variables declared 'const' without an initializer
65 have zero as the initializer if they may not be
66 overridden at link or run time. */
68 && !DECL_EXTERNAL (sym)
69 && targetm.binds_local_p (sym)
/* Restricted to scalar integral/float types where a zero of the
   symbol's type is trivially constructible.  */
70 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
71 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
72 return fold_convert (TREE_TYPE (sym), integer_zero_node);
79 /* Return true if we may propagate the address expression ADDR into the
80 dereference DEREF and cancel them. */
/* NOTE(review): elided view -- declaration specifiers and some early
   returns are missing here.  */
83 may_propagate_address_into_dereference (tree addr, tree deref)
/* Callers must hand us exactly a MEM_REF being fed an ADDR_EXPR.  */
85 gcc_assert (TREE_CODE (deref) == MEM_REF
86 && TREE_CODE (addr) == ADDR_EXPR);
88 /* Don't propagate if ADDR's operand has incomplete type. */
89 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
92 /* If the address is invariant then we do not need to preserve restrict
93 qualifications. But we do need to preserve volatile qualifiers until
94 we can annotate the folded dereference itself properly. */
95 if (is_gimple_min_invariant (addr)
96 && (!TREE_THIS_VOLATILE (deref)
97 || TYPE_VOLATILE (TREE_TYPE (addr))))
98 return useless_type_conversion_p (TREE_TYPE (deref),
99 TREE_TYPE (TREE_OPERAND (addr, 0)));
101 /* Else both the address substitution and the folding must result in
102 a valid useless type conversion sequence. */
103 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
105 && useless_type_conversion_p (TREE_TYPE (deref),
106 TREE_TYPE (TREE_OPERAND (addr, 0))));
110 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
111 BASE is an array type. OFFSET is a byte displacement.
113 LOC is the location of the original expression. */
/* NOTE(review): elided view -- the return type, braces, several early
   returns and an else-arm are not visible; do not assume control flow
   beyond what the visible lines show.  */
116 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
118 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
119 tree array_type, elt_type, elt_size;
122 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
123 measured in units of the size of elements type) from that ARRAY_REF).
124 We can't do anything if either is variable.
126 The case we handle here is *(&A[N]+O). */
127 if (TREE_CODE (base) == ARRAY_REF)
129 tree low_bound = array_ref_low_bound (base);
131 elt_offset = TREE_OPERAND (base, 1);
/* Both the existing index and the array low bound must be constant
   for the fold to be computable here.  */
132 if (TREE_CODE (low_bound) != INTEGER_CST
133 || TREE_CODE (elt_offset) != INTEGER_CST)
136 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
137 base = TREE_OPERAND (base, 0);
140 /* Ignore stupid user tricks of indexing non-array variables. */
141 array_type = TREE_TYPE (base);
142 if (TREE_CODE (array_type) != ARRAY_TYPE)
144 elt_type = TREE_TYPE (array_type);
146 /* Use signed size type for intermediate computation on the index. */
147 idx_type = ssizetype;
149 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
150 element type (so we can use the alignment if it's not constant).
151 Otherwise, compute the offset as an index by using a division. If the
152 division isn't exact, then don't do anything. */
153 elt_size = TYPE_SIZE_UNIT (elt_type);
156 if (integer_zerop (offset))
158 if (TREE_CODE (elt_size) != INTEGER_CST)
159 elt_size = size_int (TYPE_ALIGN (elt_type));
161 idx = build_int_cst (idx_type, 0);
165 unsigned HOST_WIDE_INT lquo, lrem;
166 HOST_WIDE_INT hquo, hrem;
169 /* The final array offset should be signed, so we need
170 to sign-extend the (possibly pointer) offset here
171 and use signed division. */
172 soffset = double_int_sext (tree_to_double_int (offset),
173 TYPE_PRECISION (TREE_TYPE (offset)));
/* div_and_round_double computes quotient and remainder as a
   HOST_WIDE_INT pair; a nonzero remainder (checked in elided code)
   means the byte offset is not an exact multiple of the element
   size.  */
174 if (TREE_CODE (elt_size) != INTEGER_CST
175 || div_and_round_double (TRUNC_DIV_EXPR, 0,
176 soffset.low, soffset.high,
177 TREE_INT_CST_LOW (elt_size),
178 TREE_INT_CST_HIGH (elt_size),
179 &lquo, &hquo, &lrem, &hrem)
183 idx = build_int_cst_wide (idx_type, lquo, hquo);
186 /* Assume the low bound is zero. If there is a domain type, get the
187 low bound, if any, convert the index into that type, and add the
189 min_idx = build_int_cst (idx_type, 0);
190 domain_type = TYPE_DOMAIN (array_type);
193 idx_type = domain_type;
194 if (TYPE_MIN_VALUE (idx_type))
195 min_idx = TYPE_MIN_VALUE (idx_type);
197 min_idx = fold_convert (idx_type, min_idx);
199 if (TREE_CODE (min_idx) != INTEGER_CST)
202 elt_offset = fold_convert (idx_type, elt_offset);
205 if (!integer_zerop (min_idx))
206 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
207 if (!integer_zerop (elt_offset))
208 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
210 /* Make sure to possibly truncate late after offsetting. */
211 idx = fold_convert (idx_type, idx);
213 /* We don't want to construct access past array bounds. For example
216 should not be simplified into (*c)[14] or tree-vrp will
218 This is only an issue for multi-dimensional arrays. */
219 if (TREE_CODE (elt_type) == ARRAY_TYPE
/* Reject indices provably outside [TYPE_MIN_VALUE, TYPE_MAX_VALUE]
   of the domain, or negative when no domain bounds are usable.  */
222 if (TYPE_MAX_VALUE (domain_type)
223 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
224 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
226 else if (TYPE_MIN_VALUE (domain_type)
227 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
228 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
230 else if (compare_tree_int (idx, 0) < 0)
/* Build the resulting A[X] reference and tag it with the original
   location.  */
235 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
236 SET_EXPR_LOCATION (t, loc);
242 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
243 LOC is the location of original expression.
245 Before attempting the conversion strip off existing ADDR_EXPRs. */
/* NOTE(review): elided view -- return type, braces and the returns after
   the two visible conditions are missing from this chunk.  */
248 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
/* Only addresses can be stripped and folded here.  */
254 if (TREE_CODE (base) != ADDR_EXPR)
257 base = TREE_OPERAND (base, 0);
/* A zero offset into an object of the requested type needs no array
   indexing at all.  */
258 if (types_compatible_p (orig_type, TREE_TYPE (base))
259 && integer_zerop (offset))
262 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
/* Accept the array-ref fold only when its type matches ORIG_TYPE.  */
263 if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
268 /* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
269 LOC is the location of the original expression. */
/* NOTE(review): elided view -- the parameter list continuation, braces and
   final return are not visible here.  */
272 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
278 if (TREE_CODE (addr) != ADDR_EXPR)
280 base = TREE_OPERAND (addr, 0);
281 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
/* Re-wrap the folded array reference as an address and insist the
   result converts uselessly to ORIG_TYPE.  */
284 ret = build_fold_addr_expr (ret);
285 if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
287 SET_EXPR_LOCATION (ret, loc);
294 /* A quaint feature extant in our address arithmetic is that there
295 can be hidden type changes here. The type of the result need
296 not be the same as the type of the input pointer.
298 What we're after here is an expression of the form
299 (T *)(&array + const)
300 where array is OP0, const is OP1, RES_TYPE is T and
301 the cast doesn't actually exist, but is implicit in the
302 type of the POINTER_PLUS_EXPR. We'd like to turn this into
304 which may be able to propagate further. */
/* NOTE(review): heavily elided view -- return type, braces, several early
   returns and parts of expressions are missing; treat the structure below
   as fragmentary.  */
307 maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
312 /* The first operand should be an ADDR_EXPR. */
313 if (TREE_CODE (op0) != ADDR_EXPR)
315 op0 = TREE_OPERAND (op0, 0);
317 /* It had better be a constant. */
318 if (TREE_CODE (op1) != INTEGER_CST)
320 /* Or op0 should now be A[0] and the non-constant offset defined
321 via a multiplication by the array element size. */
322 if (TREE_CODE (op0) == ARRAY_REF
323 /* As we will end up creating a variable index array access
324 in the outermost array dimension make sure there isn't
325 a more inner array that the index could overflow to. */
326 && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
327 && integer_zerop (TREE_OPERAND (op0, 1))
328 && TREE_CODE (op1) == SSA_NAME)
/* Inspect the defining statement of the SSA offset to recognize
   index * element-size.  */
330 gimple offset_def = SSA_NAME_DEF_STMT (op1);
331 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
332 if (!host_integerp (elsz, 1)
333 || !is_gimple_assign (offset_def))
336 /* Do not build array references of something that we can't
337 see the true number of array dimensions for. */
338 if (!DECL_P (TREE_OPERAND (op0, 0))
339 && !handled_component_p (TREE_OPERAND (op0, 0)))
/* offset = index * elsz: rebuild as A[index].  */
342 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
343 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
344 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
345 return build_fold_addr_expr
346 (build4 (ARRAY_REF, TREE_TYPE (op0),
347 TREE_OPERAND (op0, 0),
348 gimple_assign_rhs1 (offset_def),
349 TREE_OPERAND (op0, 2),
350 TREE_OPERAND (op0, 3)));
/* Element size 1: the offset itself is the index (non-MULT def).  */
351 else if (integer_onep (elsz)
352 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
353 return build_fold_addr_expr
354 (build4 (ARRAY_REF, TREE_TYPE (op0),
355 TREE_OPERAND (op0, 0),
357 TREE_OPERAND (op0, 2),
358 TREE_OPERAND (op0, 3)));
/* Same idea for &array p+ (index * elsz) where array is
   one-dimensional.  */
360 else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
362 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
363 && TREE_CODE (op1) == SSA_NAME)
365 gimple offset_def = SSA_NAME_DEF_STMT (op1);
366 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
367 if (!host_integerp (elsz, 1)
368 || !is_gimple_assign (offset_def))
371 /* Do not build array references of something that we can't
372 see the true number of array dimensions for. */
374 && !handled_component_p (op0))
377 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
378 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
379 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
380 return build_fold_addr_expr
381 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
382 op0, gimple_assign_rhs1 (offset_def),
383 integer_zero_node, NULL_TREE));
384 else if (integer_onep (elsz)
385 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
386 return build_fold_addr_expr
387 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
389 integer_zero_node, NULL_TREE));
395 /* If the first operand is an ARRAY_REF, expand it so that we can fold
396 the offset into it. */
397 while (TREE_CODE (op0) == ARRAY_REF)
399 tree array_obj = TREE_OPERAND (op0, 0);
400 tree array_idx = TREE_OPERAND (op0, 1);
401 tree elt_type = TREE_TYPE (op0);
402 tree elt_size = TYPE_SIZE_UNIT (elt_type);
/* Give up on variable indices or variable-size elements.  */
405 if (TREE_CODE (array_idx) != INTEGER_CST)
407 if (TREE_CODE (elt_size) != INTEGER_CST)
410 /* Un-bias the index by the min index of the array type. */
411 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
414 min_idx = TYPE_MIN_VALUE (min_idx);
417 if (TREE_CODE (min_idx) != INTEGER_CST)
420 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
421 if (!integer_zerop (min_idx))
422 array_idx = int_const_binop (MINUS_EXPR, array_idx,
427 /* Convert the index to a byte offset. */
428 array_idx = fold_convert (sizetype, array_idx);
429 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
431 /* Update the operands for the next round, or for folding. */
432 op1 = int_const_binop (PLUS_EXPR,
437 ptd_type = TREE_TYPE (res_type);
438 /* If we want a pointer to void, reconstruct the reference from the
439 array element type. A pointer to that can be trivially converted
440 to void *. This happens as we fold (void *)(ptr p+ off). */
441 if (VOID_TYPE_P (ptd_type)
442 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
443 ptd_type = TREE_TYPE (TREE_TYPE (op0));
445 /* At which point we can try some of the same things as for indirects. */
446 t = maybe_fold_offset_to_array_ref (loc, op0, op1);
449 t = build_fold_addr_expr (t);
450 if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
452 SET_EXPR_LOCATION (t, loc);
458 /* Subroutine of fold_stmt. We perform several simplifications of the
459 memory reference tree EXPR and make sure to re-gimplify them properly
460 after propagation of constant addresses. IS_LHS is true if the
461 reference is supposed to be an lvalue. */
/* NOTE(review): elided view -- return type, the declaration of T (which
   apparently points into EXPR), braces and several returns are missing.  */
464 maybe_fold_reference (tree expr, bool is_lhs)
/* Reading an element from a constant string can fold to a character
   constant (elided condition presumably guards is_lhs -- confirm).  */
468 if (TREE_CODE (expr) == ARRAY_REF
471 tree tem = fold_read_from_constant_string (expr);
476 /* ??? We might want to open-code the relevant remaining cases
477 to avoid using the generic fold. */
478 if (handled_component_p (*t)
479 && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
481 tree tem = fold (*t);
/* Strip down to the base object of the component chain.  */
486 while (handled_component_p (*t))
487 t = &TREE_OPERAND (*t, 0);
489 /* Fold back MEM_REFs to reference trees. */
490 if (TREE_CODE (*t) == MEM_REF
491 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
492 && integer_zerop (TREE_OPERAND (*t, 1))
493 && (TREE_THIS_VOLATILE (*t)
494 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
495 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
496 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
497 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
498 /* We have to look out here to not drop a required conversion
499 from the rhs to the lhs if is_lhs, but we don't have the
500 rhs here to verify that. Thus require strict type
502 && types_compatible_p (TREE_TYPE (*t),
503 TREE_TYPE (TREE_OPERAND
504 (TREE_OPERAND (*t, 0), 0))))
/* Replace *&decl with decl and retry folding the whole EXPR.  */
507 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
508 tem = maybe_fold_reference (expr, is_lhs);
513 /* Canonicalize MEM_REFs invariant address operand. */
514 else if (TREE_CODE (*t) == MEM_REF
515 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
516 && !DECL_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))
517 && !CONSTANT_CLASS_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
519 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
520 TREE_OPERAND (*t, 0),
521 TREE_OPERAND (*t, 1));
525 tem = maybe_fold_reference (expr, is_lhs);
/* Substitute known constant values of symbols (elided guard above --
   presumably restricted to rhs uses; confirm against full source).  */
534 tree tem = get_symbol_constant_value (*t);
536 && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
538 *t = unshare_expr (tem);
539 tem = maybe_fold_reference (expr, is_lhs);
550 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
551 replacement rhs for the statement or NULL_TREE if no simplification
552 could be made. It is assumed that the operands have been previously
/* NOTE(review): elided view -- the return type, many braces, `return`
   keywords and some case bodies are missing from this chunk.  */
556 fold_gimple_assign (gimple_stmt_iterator *si)
558 gimple stmt = gsi_stmt (*si);
559 enum tree_code subcode = gimple_assign_rhs_code (stmt);
560 location_t loc = gimple_location (stmt);
562 tree result = NULL_TREE;
/* Dispatch on the shape of the right-hand side.  */
564 switch (get_gimple_rhs_class (subcode))
566 case GIMPLE_SINGLE_RHS:
568 tree rhs = gimple_assign_rhs1 (stmt);
570 /* Try to fold a conditional expression. */
571 if (TREE_CODE (rhs) == COND_EXPR)
573 tree op0 = COND_EXPR_COND (rhs);
576 location_t cond_loc = EXPR_LOCATION (rhs);
578 if (COMPARISON_CLASS_P (op0))
/* Defer overflow warnings so they are only emitted when the
   folded condition is actually usable.  */
580 fold_defer_overflow_warnings ();
581 tem = fold_binary_loc (cond_loc,
582 TREE_CODE (op0), TREE_TYPE (op0),
583 TREE_OPERAND (op0, 0),
584 TREE_OPERAND (op0, 1));
585 /* This is actually a conditional expression, not a GIMPLE
586 conditional statement, however, the valid_gimple_rhs_p
587 test still applies. */
588 set = (tem && is_gimple_condexpr (tem)
589 && valid_gimple_rhs_p (tem));
590 fold_undefer_overflow_warnings (set, stmt, 0);
592 else if (is_gimple_min_invariant (op0))
601 result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
602 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
605 else if (TREE_CODE (rhs) == TARGET_MEM_REF)
606 return maybe_fold_tmr (rhs);
608 else if (REFERENCE_CLASS_P (rhs))
609 return maybe_fold_reference (rhs, false);
611 else if (TREE_CODE (rhs) == ADDR_EXPR)
613 tree ref = TREE_OPERAND (rhs, 0);
/* Fold the referenced object as an lvalue; &*p with zero
   offset collapses to p.  */
614 tree tem = maybe_fold_reference (ref, true);
616 && TREE_CODE (tem) == MEM_REF
617 && integer_zerop (TREE_OPERAND (tem, 1)))
618 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
620 result = fold_convert (TREE_TYPE (rhs),
621 build_fold_addr_expr_loc (loc, tem));
622 else if (TREE_CODE (ref) == MEM_REF
623 && integer_zerop (TREE_OPERAND (ref, 1)))
624 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
627 else if (TREE_CODE (rhs) == CONSTRUCTOR
628 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
629 && (CONSTRUCTOR_NELTS (rhs)
630 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
632 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
/* All elements must be literal constants for the fold.  */
636 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
637 if (TREE_CODE (val) != INTEGER_CST
638 && TREE_CODE (val) != REAL_CST
639 && TREE_CODE (val) != FIXED_CST)
642 return build_vector_from_ctor (TREE_TYPE (rhs),
643 CONSTRUCTOR_ELTS (rhs));
646 else if (DECL_P (rhs))
647 return unshare_expr (get_symbol_constant_value (rhs));
649 /* If we couldn't fold the RHS, hand over to the generic
651 if (result == NULL_TREE)
654 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
655 that may have been added by fold, and "useless" type
656 conversions that might now be apparent due to propagation. */
657 STRIP_USELESS_TYPE_CONVERSION (result);
659 if (result != rhs && valid_gimple_rhs_p (result))
666 case GIMPLE_UNARY_RHS:
668 tree rhs = gimple_assign_rhs1 (stmt);
670 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
673 /* If the operation was a conversion do _not_ mark a
674 resulting constant with TREE_OVERFLOW if the original
675 constant was not. These conversions have implementation
676 defined behavior and retaining the TREE_OVERFLOW flag
677 here would confuse later passes such as VRP. */
678 if (CONVERT_EXPR_CODE_P (subcode)
679 && TREE_CODE (result) == INTEGER_CST
680 && TREE_CODE (rhs) == INTEGER_CST)
681 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
683 STRIP_USELESS_TYPE_CONVERSION (result);
684 if (valid_gimple_rhs_p (result))
/* Pointer-to-pointer conversions may fold via offset-to-address.  */
687 else if (CONVERT_EXPR_CODE_P (subcode)
688 && POINTER_TYPE_P (gimple_expr_type (stmt))
689 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
691 tree type = gimple_expr_type (stmt);
692 tree t = maybe_fold_offset_to_address (loc,
693 gimple_assign_rhs1 (stmt),
694 integer_zero_node, type);
701 case GIMPLE_BINARY_RHS:
702 /* Try to fold pointer addition. */
703 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
705 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
706 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
/* For &array p+ off prefer a pointer to the element type,
   unless the lhs wants the original pointer type.  */
708 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
709 if (!useless_type_conversion_p
710 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
711 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
713 result = maybe_fold_stmt_addition (gimple_location (stmt),
715 gimple_assign_rhs1 (stmt),
716 gimple_assign_rhs2 (stmt));
720 result = fold_binary_loc (loc, subcode,
721 TREE_TYPE (gimple_assign_lhs (stmt)),
722 gimple_assign_rhs1 (stmt),
723 gimple_assign_rhs2 (stmt));
727 STRIP_USELESS_TYPE_CONVERSION (result);
728 if (valid_gimple_rhs_p (result))
731 /* Fold might have produced non-GIMPLE, so if we trust it blindly
732 we lose canonicalization opportunities. Do not go again
733 through fold here though, or the same non-GIMPLE will be
/* Canonical operand order for commutative binary codes.  */
735 if (commutative_tree_code (subcode)
736 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
737 gimple_assign_rhs2 (stmt), false))
738 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
739 gimple_assign_rhs2 (stmt),
740 gimple_assign_rhs1 (stmt));
744 case GIMPLE_TERNARY_RHS:
745 result = fold_ternary_loc (loc, subcode,
746 TREE_TYPE (gimple_assign_lhs (stmt)),
747 gimple_assign_rhs1 (stmt),
748 gimple_assign_rhs2 (stmt),
749 gimple_assign_rhs3 (stmt));
753 STRIP_USELESS_TYPE_CONVERSION (result);
754 if (valid_gimple_rhs_p (result))
757 /* Fold might have produced non-GIMPLE, so if we trust it blindly
758 we lose canonicalization opportunities. Do not go again
759 through fold here though, or the same non-GIMPLE will be
761 if (commutative_ternary_tree_code (subcode)
762 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
763 gimple_assign_rhs2 (stmt), false))
764 return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
765 gimple_assign_rhs2 (stmt),
766 gimple_assign_rhs1 (stmt),
767 gimple_assign_rhs3 (stmt));
771 case GIMPLE_INVALID_RHS:
778 /* Attempt to fold a conditional statement. Return true if any changes were
779 made. We only attempt to fold the condition expression, and do not perform
780 any transformation that would require alteration of the cfg. It is
781 assumed that the operands have been previously folded. */
/* NOTE(review): elided view -- return type, braces and final returns are
   missing here.  */
784 fold_gimple_cond (gimple stmt)
786 tree result = fold_binary_loc (gimple_location (stmt),
787 gimple_cond_code (stmt),
789 gimple_cond_lhs (stmt),
790 gimple_cond_rhs (stmt));
794 STRIP_USELESS_TYPE_CONVERSION (result);
/* Only install the folded form when it is a valid GIMPLE condition.  */
795 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
797 gimple_cond_set_condition_from_tree (stmt, result);
805 /* Convert EXPR into a GIMPLE value suitable for substitution on the
806 RHS of an assignment. Insert the necessary statements before
807 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
808 is replaced. If the call is expected to produces a result, then it
809 is replaced by an assignment of the new RHS to the result variable.
810 If the result is to be ignored, then the call is replaced by a
811 GIMPLE_NOP. A proper VDEF chain is retained by making the first
812 VUSE and the last VDEF of the whole sequence be the same as the replaced
813 statement and using new SSA names for stores in between. */
/* NOTE(review): elided view -- several declarations, braces and statements
   (including the apparent NOP-replacement path) are missing here.  */
816 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
819 tree tmp = NULL_TREE; /* Silence warning. */
820 gimple stmt, new_stmt;
821 gimple_stmt_iterator i;
822 gimple_seq stmts = gimple_seq_alloc();
823 struct gimplify_ctx gctx;
825 gimple laststore = NULL;
828 stmt = gsi_stmt (*si_p);
830 gcc_assert (is_gimple_call (stmt));
832 lhs = gimple_call_lhs (stmt);
/* Start the VUSE chain from the call being replaced.  */
833 reaching_vuse = gimple_vuse (stmt);
835 push_gimplify_context (&gctx);
837 if (lhs == NULL_TREE)
838 gimplify_and_add (expr, &stmts);
840 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
842 pop_gimplify_context (NULL);
844 if (gimple_has_location (stmt))
845 annotate_all_with_location (stmts, gimple_location (stmt));
847 /* The replacement can expose previously unreferenced variables. */
848 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
852 gsi_insert_before (si_p, last, GSI_NEW_STMT);
855 new_stmt = gsi_stmt (i);
856 find_new_referenced_vars (new_stmt);
857 mark_symbols_for_renaming (new_stmt);
858 /* If the new statement has a VUSE, update it with exact SSA name we
859 know will reach this one. */
860 if (gimple_vuse (new_stmt))
862 /* If we've also seen a previous store create a new VDEF for
863 the latter one, and make that the new reaching VUSE. */
866 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
867 gimple_set_vdef (laststore, reaching_vuse);
868 update_stmt (laststore);
871 gimple_set_vuse (new_stmt, reaching_vuse);
872 gimple_set_modified (new_stmt, true);
/* Remember the last store in the sequence; it may need the call's
   original VDEF (see below).  */
874 if (gimple_assign_single_p (new_stmt)
875 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
877 laststore = new_stmt;
882 if (lhs == NULL_TREE)
884 /* If we replace a call without LHS that has a VDEF and our new
885 sequence ends with a store we must make that store have the same
886 vdef in order not to break the sequencing. This can happen
887 for instance when folding memcpy calls into assignments. */
888 if (gimple_vdef (stmt) && laststore)
890 gimple_set_vdef (laststore, gimple_vdef (stmt));
891 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
892 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
893 update_stmt (laststore);
897 unlink_stmt_vdef (stmt);
906 gsi_insert_before (si_p, last, GSI_NEW_STMT);
/* When the call result is a register, the final store (if any)
   inherits the call's VDEF directly.  */
909 if (laststore && is_gimple_reg (lhs))
911 gimple_set_vdef (laststore, gimple_vdef (stmt));
912 update_stmt (laststore);
913 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
914 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
919 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
920 gimple_set_vdef (laststore, reaching_vuse);
921 update_stmt (laststore);
/* Finally assign the gimplified value to the original LHS.  */
924 new_stmt = gimple_build_assign (lhs, tmp);
925 if (!is_gimple_reg (tmp))
926 gimple_set_vuse (new_stmt, reaching_vuse);
927 if (!is_gimple_reg (lhs))
929 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
930 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
931 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
935 gimple_set_location (new_stmt, gimple_location (stmt));
936 gsi_replace (si_p, new_stmt, false);
939 /* Return the string length, maximum string length or maximum value of
941 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
942 is not NULL and, for TYPE == 0, its value is not equal to the length
943 we determine or if we are unable to determine the length or value,
944 return false. VISITED is a bitmap of visited variables.
945 TYPE is 0 if string length should be returned, 1 for maximum string
946 length and 2 for maximum value ARG can have. */
/* NOTE(review): elided view -- the return type (apparently bool), braces,
   several returns and some case labels are not visible here.  */
949 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
954 if (TREE_CODE (arg) != SSA_NAME)
/* A COND_EXPR is handled by requiring both arms to agree.  */
956 if (TREE_CODE (arg) == COND_EXPR)
957 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
958 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
959 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
960 else if (TREE_CODE (arg) == ADDR_EXPR
961 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
962 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
964 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
965 if (TREE_CODE (aop0) == INDIRECT_REF
966 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
967 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
968 length, visited, type);
/* Only non-negative integer constants are usable values.  */
974 if (TREE_CODE (val) != INTEGER_CST
975 || tree_int_cst_sgn (val) < 0)
979 val = c_strlen (arg, 1);
/* Merge with a previously found length: for the "maximum" modes keep
   the larger constant; for exact length require equality.  */
987 if (TREE_CODE (*length) != INTEGER_CST
988 || TREE_CODE (val) != INTEGER_CST)
991 if (tree_int_cst_lt (*length, val))
995 else if (simple_cst_equal (val, *length) != 1)
1003 /* If we were already here, break the infinite cycle. */
1004 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
1006 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
1009 def_stmt = SSA_NAME_DEF_STMT (var);
1011 switch (gimple_code (def_stmt))
1014 /* The RHS of the statement defining VAR must either have a
1015 constant length or come from another SSA_NAME with a constant
1017 if (gimple_assign_single_p (def_stmt)
1018 || gimple_assign_unary_nop_p (def_stmt))
1020 tree rhs = gimple_assign_rhs1 (def_stmt);
1021 return get_maxval_strlen (rhs, length, visited, type);
1027 /* All the arguments of the PHI node must have the same constant
1031 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1033 tree arg = gimple_phi_arg (def_stmt, i)->def;
1035 /* If this PHI has itself as an argument, we cannot
1036 determine the string length of this argument. However,
1037 if we can find a constant string length for the other
1038 PHI args then we can still be sure that this is a
1039 constant string length. So be optimistic and just
1040 continue with the next argument. */
1041 if (arg == gimple_phi_result (def_stmt))
1044 if (!get_maxval_strlen (arg, length, visited, type))
1056 /* Fold builtin call in statement STMT. Returns a simplified tree.
1057 We may return a non-constant expression, including another call
1058 to a different function and with different arguments, e.g.,
1059 substituting memcpy for strcpy when the string length is known.
1060 Note that some builtins expand into inline code that may not
1061 be valid in GIMPLE. Callers must take care. */
/* NOTE(review): elided view -- the return type, braces, several
   declarations (arg_idx, type, a, visited, nargs, callee) and the code
   that selects ARG_IDX/TYPE per builtin are not fully visible.  */
1064 gimple_fold_builtin (gimple stmt)
1066 tree result, val[3];
1072 location_t loc = gimple_location (stmt);
1074 gcc_assert (is_gimple_call (stmt));
/* IGNORE is true when the call's value is unused (no LHS).  */
1076 ignore = (gimple_call_lhs (stmt) == NULL);
1078 /* First try the generic builtin folder. If that succeeds, return the
1080 result = fold_call_stmt (stmt, ignore);
1084 STRIP_NOPS (result);
1088 /* Ignore MD builtins. */
1089 callee = gimple_call_fndecl (stmt);
1090 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1093 /* If the builtin could not be folded, and it has no argument list,
1095 nargs = gimple_call_num_args (stmt);
1099 /* Limit the work only for builtins we know how to simplify. */
/* This first switch (with elided assignments) appears to pick which
   argument's strlen/value to compute and in which mode -- confirm
   against the full source.  */
1100 switch (DECL_FUNCTION_CODE (callee))
1102 case BUILT_IN_STRLEN:
1103 case BUILT_IN_FPUTS:
1104 case BUILT_IN_FPUTS_UNLOCKED:
1108 case BUILT_IN_STRCPY:
1109 case BUILT_IN_STRNCPY:
1113 case BUILT_IN_MEMCPY_CHK:
1114 case BUILT_IN_MEMPCPY_CHK:
1115 case BUILT_IN_MEMMOVE_CHK:
1116 case BUILT_IN_MEMSET_CHK:
1117 case BUILT_IN_STRNCPY_CHK:
1121 case BUILT_IN_STRCPY_CHK:
1122 case BUILT_IN_STPCPY_CHK:
1126 case BUILT_IN_SNPRINTF_CHK:
1127 case BUILT_IN_VSNPRINTF_CHK:
1135 if (arg_idx >= nargs)
1138 /* Try to use the dataflow information gathered by the CCP process. */
1139 visited = BITMAP_ALLOC (NULL);
1140 bitmap_clear (visited);
1142 memset (val, 0, sizeof (val));
1143 a = gimple_call_arg (stmt, arg_idx);
1144 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
1145 val[arg_idx] = NULL_TREE;
1147 BITMAP_FREE (visited);
/* Second switch: perform the per-builtin simplification using the
   length/value computed above.  */
1150 switch (DECL_FUNCTION_CODE (callee))
1152 case BUILT_IN_STRLEN:
1153 if (val[0] && nargs == 1)
1156 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
1158 /* If the result is not a valid gimple value, or not a cast
1159 of a valid gimple value, then we can not use the result. */
1160 if (is_gimple_val (new_val)
1161 || (is_gimple_cast (new_val)
1162 && is_gimple_val (TREE_OPERAND (new_val, 0))))
1167 case BUILT_IN_STRCPY:
1168 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
1169 result = fold_builtin_strcpy (loc, callee,
1170 gimple_call_arg (stmt, 0),
1171 gimple_call_arg (stmt, 1),
1175 case BUILT_IN_STRNCPY:
1176 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1177 result = fold_builtin_strncpy (loc, callee,
1178 gimple_call_arg (stmt, 0),
1179 gimple_call_arg (stmt, 1),
1180 gimple_call_arg (stmt, 2),
1184 case BUILT_IN_FPUTS:
1186 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1187 gimple_call_arg (stmt, 1),
1188 ignore, false, val[0]);
1191 case BUILT_IN_FPUTS_UNLOCKED:
1193 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1194 gimple_call_arg (stmt, 1),
1195 ignore, true, val[0]);
1198 case BUILT_IN_MEMCPY_CHK:
1199 case BUILT_IN_MEMPCPY_CHK:
1200 case BUILT_IN_MEMMOVE_CHK:
1201 case BUILT_IN_MEMSET_CHK:
1202 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1203 result = fold_builtin_memory_chk (loc, callee,
1204 gimple_call_arg (stmt, 0),
1205 gimple_call_arg (stmt, 1),
1206 gimple_call_arg (stmt, 2),
1207 gimple_call_arg (stmt, 3),
1209 DECL_FUNCTION_CODE (callee));
1212 case BUILT_IN_STRCPY_CHK:
1213 case BUILT_IN_STPCPY_CHK:
1214 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1215 result = fold_builtin_stxcpy_chk (loc, callee,
1216 gimple_call_arg (stmt, 0),
1217 gimple_call_arg (stmt, 1),
1218 gimple_call_arg (stmt, 2),
1220 DECL_FUNCTION_CODE (callee));
1223 case BUILT_IN_STRNCPY_CHK:
1224 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1225 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
1226 gimple_call_arg (stmt, 1),
1227 gimple_call_arg (stmt, 2),
1228 gimple_call_arg (stmt, 3),
1232 case BUILT_IN_SNPRINTF_CHK:
1233 case BUILT_IN_VSNPRINTF_CHK:
1234 if (val[1] && is_gimple_val (val[1]))
1235 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1236 DECL_FUNCTION_CODE (callee));
/* Discard side-effect-free results when the call value is unused.  */
1243 if (result && ignore)
1244 result = fold_ignored_result (result);
1248 /* Return the first of the base binfos of BINFO that has virtual functions. */
/* NOTE(review): lines are elided in this extract (embedded line numbers jump);
   the return type, declarations of I/BASE_BINFO, the return of BASE_BINFO on a
   hit and of NULL_TREE on fall-through are presumably among the missing lines
   -- confirm against the full file.  */
1251 get_first_base_binfo_with_virtuals (tree binfo)
/* Walk the direct base binfos in order; the first one carrying a non-empty
   BINFO_VIRTUALS vector is the one we want.  */
1256 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1257 if (BINFO_VIRTUALS (base_binfo))
1264 /* Search for a base binfo of BINFO that corresponds to TYPE and return it if
1265    it is found or NULL_TREE if it is not. */
/* NOTE(review): elided extract -- the return type line, the declarations of
   I/BASE_BINFO, the assignment to RES on a match and the final return of RES
   are not visible here; confirm against the full file.  */
1268 get_base_binfo_for_type (tree binfo, tree type)
1272 tree res = NULL_TREE;
/* Linear scan over the direct bases, matching on the base binfo's type.  */
1274 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1275 if (TREE_TYPE (base_binfo) == type)
1284 /* Return a binfo describing the part of object referenced by expression REF.
1285    Return NULL_TREE if it cannot be determined. REF can consist of a series of
1286    COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just
1287    a simple declaration, indirect reference or an SSA_NAME. If the function
1288    discovers an INDIRECT_REF or an SSA_NAME, it will assume that the
1289    encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
1290    Otherwise the first non-artificial field declaration or the base declaration
1291    will be examined to get the encapsulating type. */
/* NOTE(review): elided extract -- the surrounding loop construct, some braces
   and the failure returns of NULL_TREE are not visible; confirm against the
   full file before relying on the exact control flow.  */
1294 gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
1298 if (TREE_CODE (ref) == COMPONENT_REF)
1301 tree binfo, base_binfo;
1302 tree field = TREE_OPERAND (ref, 1);
/* A user-written (non-artificial) field of RECORD_TYPE directly yields the
   binfo of its own type.  */
1304 if (!DECL_ARTIFICIAL (field))
1306 tree type = TREE_TYPE (field);
1307 if (TREE_CODE (type) == RECORD_TYPE)
1308 return TYPE_BINFO (type);
/* Artificial field: this is presumably a base-class subobject access, so
   look at the enclosing type's binfo and its bases.  */
1313 par_type = TREE_TYPE (TREE_OPERAND (ref, 0));
1314 binfo = TYPE_BINFO (par_type);
1316 || BINFO_N_BASE_BINFOS (binfo) == 0)
/* Only the primary base (first with virtuals) shares the vtable layout;
   if the field is of a different type we cannot map through it here.  */
1319 base_binfo = get_first_base_binfo_with_virtuals (binfo);
1320 if (base_binfo && BINFO_TYPE (base_binfo) != TREE_TYPE (field))
1324 d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0),
1326 /* Get descendant binfo. */
1329 return get_base_binfo_for_type (d_binfo, TREE_TYPE (field));
/* Strip one COMPONENT_REF level and continue (loop structure elided).  */
1332 ref = TREE_OPERAND (ref, 0);
1334 else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE)
1335 return TYPE_BINFO (TREE_TYPE (ref));
/* For pointers/SSA names we can only trust the caller-provided binfo.  */
1336 else if (known_binfo
1337 && (TREE_CODE (ref) == SSA_NAME
1338 || TREE_CODE (ref) == MEM_REF))
1345 /* Fold a OBJ_TYPE_REF expression to the address of a function. TOKEN is
1346    integer form of OBJ_TYPE_REF_TOKEN of the reference expression. KNOWN_BINFO
1347    carries the binfo describing the true type of OBJ_TYPE_REF_OBJECT(REF). */
/* NOTE(review): elided extract -- the declarations of V/I/FNDECL and the loop
   that advances V down the TREE_LIST by I steps are not fully visible; confirm
   against the full file.  */
1350 gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo)
1355 v = BINFO_VIRTUALS (known_binfo);
/* Each vtable slot occupies TARGET_VTABLE_USES_DESCRIPTORS entries when
   descriptors are in use, otherwise one entry per function.  */
1359 i += (TARGET_VTABLE_USES_DESCRIPTORS
1360 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
/* V now points at the TREE_LIST node for slot TOKEN; take its function
   decl and return its address.  */
1364 fndecl = TREE_VALUE (v);
1365 return build_fold_addr_expr (fndecl);
1369 /* Fold a OBJ_TYPE_REF expression to the address of a function. If KNOWN_TYPE
1370    is not NULL_TREE, it is the true type of the outmost encapsulating object if
1371    that comes from a pointer SSA_NAME. If the true outmost encapsulating type
1372    can be determined from a declaration OBJ_TYPE_REF_OBJECT(REF), it is used
1373    regardless of KNOWN_TYPE (which thus can be NULL_TREE). */
/* NOTE(review): elided extract -- the return type, the declaration of BINFO
   and the NULL_TREE fallback when no binfo is found are not visible here.  */
1376 gimple_fold_obj_type_ref (tree ref, tree known_type)
1378 tree obj = OBJ_TYPE_REF_OBJECT (ref);
1379 tree known_binfo = known_type ? TYPE_BINFO (known_type) : NULL_TREE;
/* Look through &obj so the binfo walk sees the object itself.  */
1382 if (TREE_CODE (obj) == ADDR_EXPR)
1383 obj = TREE_OPERAND (obj, 0);
1385 binfo = gimple_get_relevant_ref_binfo (obj, known_binfo);
/* TOKEN is the vtable slot index; devirtualize through the binfo.  */
1388 HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
1389 return gimple_fold_obj_type_ref_known_binfo (token, binfo);
1395 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1396    The statement may be replaced by another statement, e.g., if the call
1397    simplifies to a constant value. Return true if any changes were made.
1398    It is assumed that the operands have been previously folded. */
/* NOTE(review): elided extract -- the `return true/false` statements and some
   enclosing braces are not visible; confirm against the full file.  */
1401 fold_gimple_call (gimple_stmt_iterator *gsi)
1403 gimple stmt = gsi_stmt (*gsi);
1405 tree callee = gimple_call_fndecl (stmt);
1407 /* Check for builtins that CCP can handle using information not
1408    available in the generic fold routines. */
1409 if (callee && DECL_BUILT_IN (callee))
1411 tree result = gimple_fold_builtin (stmt);
/* Prefer the cheap in-place replacement; fall back to re-gimplifying the
   folded tree when it is not a valid GIMPLE rhs.  */
1415 if (!update_call_from_tree (gsi, result))
1416 gimplify_and_update_call_from_tree (gsi, result);
1422 /* ??? Should perhaps do this in fold proper. However, doing it
1423    there requires that we create a new CALL_EXPR, and that requires
1424    copying EH region info to the new node. Easier to just do it
1425    here where we can just smash the call operand. */
1426 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
1427 callee = gimple_call_fn (stmt);
/* Try to devirtualize an OBJ_TYPE_REF call whose object is a known
   address expression.  */
1428 if (TREE_CODE (callee) == OBJ_TYPE_REF
1429 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR)
1433 t = gimple_fold_obj_type_ref (callee, NULL_TREE);
1436 gimple_call_set_fn (stmt, t);
1445 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1446    distinguishes both cases. */
/* NOTE(review): elided extract -- case labels (GIMPLE_ASSIGN/COND/CALL/ASM),
   `changed = true` updates, `break`s and the final `return changed` are among
   the lines not visible here; confirm against the full file.  */
1449 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1451 bool changed = false;
1452 gimple stmt = gsi_stmt (*gsi);
1455 /* Fold the main computation performed by the statement. */
1456 switch (gimple_code (stmt))
1460 unsigned old_num_ops = gimple_num_ops (stmt);
1461 tree new_rhs = fold_gimple_assign (gsi);
1462 tree lhs = gimple_assign_lhs (stmt);
/* Keep the assignment type-correct if folding changed the rhs type.  */
1464 && !useless_type_conversion_p (TREE_TYPE (lhs),
1465 TREE_TYPE (new_rhs)))
1466 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
/* When folding in place, only accept a rhs that does not need more
   operand slots than the statement already has.  */
1469 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
1471 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1478 changed |= fold_gimple_cond (stmt);
1482 /* Fold *& in call arguments. */
1483 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1484 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1486 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1489 gimple_call_set_arg (stmt, i, tmp);
1493 /* The entire statement may be replaced in this case. */
1495 changed |= fold_gimple_call (gsi);
1499 /* Fold *& in asm operands. */
1500 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1502 tree link = gimple_asm_output_op (stmt, i);
1503 tree op = TREE_VALUE (link);
/* Outputs are lvalues: TRUE asks maybe_fold_reference for one.  */
1504 if (REFERENCE_CLASS_P (op)
1505 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1507 TREE_VALUE (link) = op;
1511 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1513 tree link = gimple_asm_input_op (stmt, i);
1514 tree op = TREE_VALUE (link);
1515 if (REFERENCE_CLASS_P (op)
1516 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
1518 TREE_VALUE (link) = op;
/* Re-fetch: fold_gimple_call above may have replaced the statement.  */
1527 stmt = gsi_stmt (*gsi);
1529 /* Fold *& on the lhs. */
1530 if (gimple_has_lhs (stmt))
1532 tree lhs = gimple_get_lhs (stmt);
1533 if (lhs && REFERENCE_CLASS_P (lhs))
1535 tree new_lhs = maybe_fold_reference (lhs, true);
1538 gimple_set_lhs (stmt, new_lhs);
1547 /* Fold the statement pointed to by GSI. In some cases, this function may
1548    replace the whole statement with a new one. Returns true iff folding
1550    The statement pointed to by GSI should be in valid gimple form but may
1551    be in unfolded state as resulting from for example constant propagation
1552    which can produce *&x = 0. */
/* Thin public wrapper: full (non-inplace) folding via the shared worker.  */
1555 fold_stmt (gimple_stmt_iterator *gsi)
1557 return fold_stmt_1 (gsi, false);
1560 /* Perform the minimal folding on statement STMT. Only operations like
1561    *&x created by constant propagation are handled. The statement cannot
1562    be replaced with a new one. Return true if the statement was
1563    changed, false otherwise.
1564    The statement STMT should be in valid gimple form but may
1565    be in unfolded state as resulting from for example constant propagation
1566    which can produce *&x = 0. */
1569 fold_stmt_inplace (gimple stmt)
1571 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1572 bool changed = fold_stmt_1 (&gsi, true);
/* Inplace folding must never have replaced the statement object.  */
1573 gcc_assert (gsi_stmt (gsi) == stmt);
1577 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1578    if EXPR is null or we don't know how.
1579    If non-null, the result always has boolean type. */
/* NOTE(review): elided extract -- the `if (invert)` / `else` split and the
   trailing `return NULL_TREE` fallbacks are not visible here.  The first
   group below is presumably the INVERT branch (nonzero -> false, zero ->
   true, SSA_NAME -> EQ 0, comparison code inverted); the second is the
   plain branch.  Confirm against the full file.  */
1582 canonicalize_bool (tree expr, bool invert)
1588 if (integer_nonzerop (expr))
1589 return boolean_false_node;
1590 else if (integer_zerop (expr))
1591 return boolean_true_node;
1592 else if (TREE_CODE (expr) == SSA_NAME)
1593 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1594 build_int_cst (TREE_TYPE (expr), 0));
1595 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1596 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1598 TREE_OPERAND (expr, 0),
1599 TREE_OPERAND (expr, 1));
/* Non-inverting path: anything already of BOOLEAN_TYPE is returned in a
   canonical boolean form.  */
1605 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1607 if (integer_nonzerop (expr))
1608 return boolean_true_node;
1609 else if (integer_zerop (expr))
1610 return boolean_false_node;
1611 else if (TREE_CODE (expr) == SSA_NAME)
1612 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1613 build_int_cst (TREE_TYPE (expr), 0));
1614 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1615 return fold_build2 (TREE_CODE (expr),
1617 TREE_OPERAND (expr, 0),
1618 TREE_OPERAND (expr, 1));
1624 /* Check to see if a boolean expression EXPR is logically equivalent to the
1625    comparison (OP1 CODE OP2). Check for various identities involving
/* NOTE(review): elided extract -- the declaration of S, `return true`
   statements after the matched conditions and the final `return false`
   are not visible here; confirm against the full file.  */
1629 same_bool_comparison_p (const_tree expr, enum tree_code code,
1630 const_tree op1, const_tree op2)
1634 /* The obvious case. */
1635 if (TREE_CODE (expr) == code
1636 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1637 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1640 /* Check for comparing (name, name != 0) and the case where expr
1641    is an SSA_NAME with a definition matching the comparison. */
1642 if (TREE_CODE (expr) == SSA_NAME
1643 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
/* A boolean NAME equals (NAME != 0) and the negation of (NAME == 0).  */
1645 if (operand_equal_p (expr, op1, 0))
1646 return ((code == NE_EXPR && integer_zerop (op2))
1647 || (code == EQ_EXPR && integer_nonzerop (op2)));
1648 s = SSA_NAME_DEF_STMT (expr);
1649 if (is_gimple_assign (s)
1650 && gimple_assign_rhs_code (s) == code
1651 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1652 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1656 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1657    of name is a comparison, recurse. */
1658 if (TREE_CODE (op1) == SSA_NAME
1659 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1661 s = SSA_NAME_DEF_STMT (op1);
1662 if (is_gimple_assign (s)
1663 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1665 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) / (name == 1): same polarity, recurse directly.  */
1666 if ((c == NE_EXPR && integer_zerop (op2))
1667 || (c == EQ_EXPR && integer_nonzerop (op2)))
1668 return same_bool_comparison_p (expr, c,
1669 gimple_assign_rhs1 (s),
1670 gimple_assign_rhs2 (s));
/* (name == 0) / (name != 1): inverted polarity, recurse with the
   inverted comparison code.  */
1671 if ((c == EQ_EXPR && integer_zerop (op2))
1672 || (c == NE_EXPR && integer_nonzerop (op2)))
1673 return same_bool_comparison_p (expr,
1674 invert_tree_comparison (c, false),
1675 gimple_assign_rhs1 (s),
1676 gimple_assign_rhs2 (s));
1682 /* Check to see if two boolean expressions OP1 and OP2 are logically
/* NOTE(review): elided extract -- `return true` after each matched case and
   the final `return false` are presumably among the missing lines.  */
1686 same_bool_result_p (const_tree op1, const_tree op2)
1688 /* Simple cases first. */
1689 if (operand_equal_p (op1, op2, 0))
1692 /* Check the cases where at least one of the operands is a comparison.
1693    These are a bit smarter than operand_equal_p in that they apply some
1694    identifies on SSA_NAMEs. */
1695 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1696 && same_bool_comparison_p (op1, TREE_CODE (op2),
1697 TREE_OPERAND (op2, 0),
1698 TREE_OPERAND (op2, 1)))
1700 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1701 && same_bool_comparison_p (op2, TREE_CODE (op1),
1702 TREE_OPERAND (op1, 0),
1703 TREE_OPERAND (op1, 1)))
1710 /* Forward declarations for some mutually recursive functions. */
1713 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1714 enum tree_code code2, tree op2a, tree op2b);
1716 and_var_with_comparison (tree var, bool invert,
1717 enum tree_code code2, tree op2a, tree op2b);
1719 and_var_with_comparison_1 (gimple stmt,
1720 enum tree_code code2, tree op2a, tree op2b);
1722 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1723 enum tree_code code2, tree op2a, tree op2b);
1725 or_var_with_comparison (tree var, bool invert,
1726 enum tree_code code2, tree op2a, tree op2b);
1728 or_var_with_comparison_1 (gimple stmt,
1729 enum tree_code code2, tree op2a, tree op2b);
1731 /* Helper function for and_comparisons_1: try to simplify the AND of the
1732    ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1733    If INVERT is true, invert the value of the VAR before doing the AND.
1734    Return NULL_EXPR if we can't simplify this to a single expression. */
/* NOTE(review): elided extract -- the declaration of T, the NULL_TREE return
   for non-assignment definitions and the `if (invert)`/`else` structure are
   not visible; confirm against the full file.  */
1737 and_var_with_comparison (tree var, bool invert,
1738 enum tree_code code2, tree op2a, tree op2b)
1741 gimple stmt = SSA_NAME_DEF_STMT (var);
1743 /* We can only deal with variables whose definitions are assignments. */
1744 if (!is_gimple_assign (stmt))
1747 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1748    !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1749    Then we only have to consider the simpler non-inverted cases. */
1751 t = or_var_with_comparison_1 (stmt,
1752 invert_tree_comparison (code2, false),
1755 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool applies the pending inversion and normalizes the
   result to boolean type (or returns NULL_TREE).  */
1756 return canonicalize_bool (t, invert);
1759 /* Try to simplify the AND of the ssa variable defined by the assignment
1760    STMT with the comparison specified by (OP2A CODE2 OP2B).
1761    Return NULL_EXPR if we can't simplify this to a single expression. */
/* NOTE(review): elided extract -- several returns (var, NULL_TREE), brace
   structure and the innercode == TRUTH_AND_EXPR tests around the partial
   results are not visible here; confirm against the full file.  */
1764 and_var_with_comparison_1 (gimple stmt,
1765 enum tree_code code2, tree op2a, tree op2b)
1767 tree var = gimple_assign_lhs (stmt);
1768 tree true_test_var = NULL_TREE;
1769 tree false_test_var = NULL_TREE;
1770 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1772 /* Check for identities like (var AND (var == 0)) => false. */
1773 if (TREE_CODE (op2a) == SSA_NAME
1774 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) / (op2a == 1) tests op2a for truth ...  */
1776 if ((code2 == NE_EXPR && integer_zerop (op2b))
1777 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1779 true_test_var = op2a;
1780 if (var == true_test_var)
/* ... while (op2a == 0) / (op2a != 1) tests it for falsehood.  */
1783 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1784 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1786 false_test_var = op2a;
1787 if (var == false_test_var)
1788 return boolean_false_node;
1792 /* If the definition is a comparison, recurse on it. */
1793 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1795 tree t = and_comparisons_1 (innercode,
1796 gimple_assign_rhs1 (stmt),
1797 gimple_assign_rhs2 (stmt),
1805 /* If the definition is an AND or OR expression, we may be able to
1806    simplify by reassociating. */
1807 if (innercode == TRUTH_AND_EXPR
1808 || innercode == TRUTH_OR_EXPR
1809 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1810 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
1812 tree inner1 = gimple_assign_rhs1 (stmt);
1813 tree inner2 = gimple_assign_rhs2 (stmt);
1816 tree partial = NULL_TREE;
1817 bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);
1819 /* Check for boolean identities that don't require recursive examination
1821    inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1822    inner1 AND (inner1 OR inner2) => inner1
1823    !inner1 AND (inner1 AND inner2) => false
1824    !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1825    Likewise for similar cases involving inner2. */
1826 if (inner1 == true_test_var)
1827 return (is_and ? var : inner1);
1828 else if (inner2 == true_test_var)
1829 return (is_and ? var : inner2);
1830 else if (inner1 == false_test_var)
1832 ? boolean_false_node
1833 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1834 else if (inner2 == false_test_var)
1836 ? boolean_false_node
1837 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1839 /* Next, redistribute/reassociate the AND across the inner tests.
1840    Compute the first partial result, (inner1 AND (op2a code op2b)) */
1841 if (TREE_CODE (inner1) == SSA_NAME
1842 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1843 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1844 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1845 gimple_assign_rhs1 (s),
1846 gimple_assign_rhs2 (s),
1847 code2, op2a, op2b)))
1849 /* Handle the AND case, where we are reassociating:
1850    (inner1 AND inner2) AND (op2a code2 op2b)
1852    If the partial result t is a constant, we win. Otherwise
1853    continue on to try reassociating with the other inner test. */
1856 if (integer_onep (t))
1858 else if (integer_zerop (t))
1859 return boolean_false_node;
1862 /* Handle the OR case, where we are redistributing:
1863    (inner1 OR inner2) AND (op2a code2 op2b)
1864    => (t OR (inner2 AND (op2a code2 op2b))) */
1867 if (integer_onep (t))
1868 return boolean_true_node;
1870 /* Save partial result for later. */
1875 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1876 if (TREE_CODE (inner2) == SSA_NAME
1877 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1878 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1879 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1880 gimple_assign_rhs1 (s),
1881 gimple_assign_rhs2 (s),
1882 code2, op2a, op2b)))
1884 /* Handle the AND case, where we are reassociating:
1885    (inner1 AND inner2) AND (op2a code2 op2b)
1886    => (inner1 AND t) */
1889 if (integer_onep (t))
1891 else if (integer_zerop (t))
1892 return boolean_false_node;
1895 /* Handle the OR case. where we are redistributing:
1896    (inner1 OR inner2) AND (op2a code2 op2b)
1897    => (t OR (inner1 AND (op2a code2 op2b)))
1898    => (t OR partial) */
1901 if (integer_onep (t))
1902 return boolean_true_node;
1905 /* We already got a simplification for the other
1906    operand to the redistributed OR expression. The
1907    interesting case is when at least one is false.
1908    Or, if both are the same, we can apply the identity
1910 if (integer_zerop (partial))
1912 else if (integer_zerop (t))
1914 else if (same_bool_result_p (t, partial))
1923 /* Try to simplify the AND of two comparisons defined by
1924    (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1925    If this can be done without constructing an intermediate value,
1926    return the resulting tree; otherwise NULL_TREE is returned.
1927    This function is deliberately asymmetric as it recurses on SSA_DEFs
1928    in the first comparison but not the second. */
/* NOTE(review): elided extract -- declarations (bool done/val), several
   `if (t)` / `return` lines, switch braces and the trailing NULL_TREE
   return are not visible here; confirm against the full file.  */
1931 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1932 enum tree_code code2, tree op2a, tree op2b)
1934 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1935 if (operand_equal_p (op1a, op2a, 0)
1936 && operand_equal_p (op1b, op2b, 0))
1938 tree t = combine_comparisons (UNKNOWN_LOCATION,
1939 TRUTH_ANDIF_EXPR, code1, code2,
1940 boolean_type_node, op1a, op1b);
1945 /* Likewise the swapped case of the above. */
1946 if (operand_equal_p (op1a, op2b, 0)
1947 && operand_equal_p (op1b, op2a, 0))
1949 tree t = combine_comparisons (UNKNOWN_LOCATION,
1950 TRUTH_ANDIF_EXPR, code1,
1951 swap_tree_comparison (code2),
1952 boolean_type_node, op1a, op1b);
1957 /* If both comparisons are of the same value against constants, we might
1958    be able to merge them. */
1959 if (operand_equal_p (op1a, op2a, 0)
1960 && TREE_CODE (op1b) == INTEGER_CST
1961 && TREE_CODE (op2b) == INTEGER_CST)
/* cmp orders the two constants: <0, 0, >0 for op1b <, ==, > op2b.  */
1963 int cmp = tree_int_cst_compare (op1b, op2b);
1965 /* If we have (op1a == op1b), we should either be able to
1966    return that or FALSE, depending on whether the constant op1b
1967    also satisfies the other comparison against op2b. */
1968 if (code1 == EQ_EXPR)
1974 case EQ_EXPR: val = (cmp == 0); break;
1975 case NE_EXPR: val = (cmp != 0); break;
1976 case LT_EXPR: val = (cmp < 0); break;
1977 case GT_EXPR: val = (cmp > 0); break;
1978 case LE_EXPR: val = (cmp <= 0); break;
1979 case GE_EXPR: val = (cmp >= 0); break;
1980 default: done = false;
1985 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1987 return boolean_false_node;
1990 /* Likewise if the second comparison is an == comparison. */
1991 else if (code2 == EQ_EXPR)
/* Note the comparison senses flip here because cmp orders op1b
   relative to op2b, not the other way around.  */
1997 case EQ_EXPR: val = (cmp == 0); break;
1998 case NE_EXPR: val = (cmp != 0); break;
1999 case LT_EXPR: val = (cmp > 0); break;
2000 case GT_EXPR: val = (cmp < 0); break;
2001 case LE_EXPR: val = (cmp >= 0); break;
2002 case GE_EXPR: val = (cmp <= 0); break;
2003 default: done = false;
2008 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2010 return boolean_false_node;
2014 /* Same business with inequality tests. */
2015 else if (code1 == NE_EXPR)
2020 case EQ_EXPR: val = (cmp != 0); break;
2021 case NE_EXPR: val = (cmp == 0); break;
2022 case LT_EXPR: val = (cmp >= 0); break;
2023 case GT_EXPR: val = (cmp <= 0); break;
2024 case LE_EXPR: val = (cmp > 0); break;
2025 case GE_EXPR: val = (cmp < 0); break;
2030 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2032 else if (code2 == NE_EXPR)
2037 case EQ_EXPR: val = (cmp == 0); break;
2038 case NE_EXPR: val = (cmp != 0); break;
2039 case LT_EXPR: val = (cmp <= 0); break;
2040 case GT_EXPR: val = (cmp >= 0); break;
2041 case LE_EXPR: val = (cmp < 0); break;
2042 case GE_EXPR: val = (cmp > 0); break;
2047 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2050 /* Chose the more restrictive of two < or <= comparisons. */
2051 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2052 && (code2 == LT_EXPR || code2 == LE_EXPR))
2054 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2055 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2057 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2060 /* Likewise chose the more restrictive of two > or >= comparisons. */
2061 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2062 && (code2 == GT_EXPR || code2 == GE_EXPR))
2064 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2065 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2067 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2070 /* Check for singleton ranges. */
/* (x <= c AND x >= c) and the swapped form collapse to x == c
   (the cmp == 0 guard is presumably on the elided line above).  */
2072 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2073 || (code1 == GE_EXPR && code2 == LE_EXPR))
2074 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2076 /* Check for disjoint ranges. */
2078 && (code1 == LT_EXPR || code1 == LE_EXPR)
2079 && (code2 == GT_EXPR || code2 == GE_EXPR))
2080 return boolean_false_node;
2082 && (code1 == GT_EXPR || code1 == GE_EXPR)
2083 && (code2 == LT_EXPR || code2 == LE_EXPR))
2084 return boolean_false_node;
2087 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2088    NAME's definition is a truth value. See if there are any simplifications
2089    that can be done against the NAME's definition. */
2090 if (TREE_CODE (op1a) == SSA_NAME
2091 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2092 && (integer_zerop (op1b) || integer_onep (op1b)))
2094 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2095 || (code1 == NE_EXPR && integer_onep (op1b)));
2096 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2097 switch (gimple_code (stmt))
2100 /* Try to simplify by copy-propagating the definition. */
2101 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2104 /* If every argument to the PHI produces the same result when
2105    ANDed with the second comparison, we win.
2106    Do not do this unless the type is bool since we need a bool
2107    result here anyway. */
2108 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2110 tree result = NULL_TREE;
2112 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2114 tree arg = gimple_phi_arg_def (stmt, i);
2116 /* If this PHI has itself as an argument, ignore it.
2117    If all the other args produce the same result,
2119 if (arg == gimple_phi_result (stmt))
2121 else if (TREE_CODE (arg) == INTEGER_CST)
2123 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2126 result = boolean_false_node;
2127 else if (!integer_zerop (result))
2131 result = fold_build2 (code2, boolean_type_node,
2133 else if (!same_bool_comparison_p (result,
2137 else if (TREE_CODE (arg) == SSA_NAME)
2139 tree temp = and_var_with_comparison (arg, invert,
2145 else if (!same_bool_result_p (result, temp))
2161 /* Try to simplify the AND of two comparisons, specified by
2162    (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2163    If this can be simplified to a single expression (without requiring
2164    introducing more SSA variables to hold intermediate values),
2165    return the resulting tree. Otherwise return NULL_TREE.
2166    If the result expression is non-null, it has boolean type. */
2169 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2170 enum tree_code code2, tree op2a, tree op2b)
2172 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
/* and_comparisons_1 only recurses through the first operand, so retry
   with the operands swapped before giving up (the `if (t) return t;`
   between these calls appears to be elided).  */
2176 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2179 /* Helper function for or_comparisons_1: try to simplify the OR of the
2180    ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2181    If INVERT is true, invert the value of VAR before doing the OR.
2182    Return NULL_EXPR if we can't simplify this to a single expression. */
/* NOTE(review): mirror image of and_var_with_comparison; the declaration of
   T, the NULL_TREE return and the `if (invert)`/`else` structure are elided
   here -- confirm against the full file.  */
2185 or_var_with_comparison (tree var, bool invert,
2186 enum tree_code code2, tree op2a, tree op2b)
2189 gimple stmt = SSA_NAME_DEF_STMT (var);
2191 /* We can only deal with variables whose definitions are assignments. */
2192 if (!is_gimple_assign (stmt))
2195 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2196    !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2197    Then we only have to consider the simpler non-inverted cases. */
2199 t = and_var_with_comparison_1 (stmt,
2200 invert_tree_comparison (code2, false),
2203 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool applies the pending inversion and normalizes to
   boolean type (or yields NULL_TREE).  */
2204 return canonicalize_bool (t, invert);
2207 /* Try to simplify the OR of the ssa variable defined by the assignment
2208    STMT with the comparison specified by (OP2A CODE2 OP2B).
2209    Return NULL_EXPR if we can't simplify this to a single expression. */
/* NOTE(review): dual of and_var_with_comparison_1 with AND/OR exchanged.
   Several returns, brace structure and the innercode tests around the
   partial results are elided here; confirm against the full file.  */
2212 or_var_with_comparison_1 (gimple stmt,
2213 enum tree_code code2, tree op2a, tree op2b)
2215 tree var = gimple_assign_lhs (stmt);
2216 tree true_test_var = NULL_TREE;
2217 tree false_test_var = NULL_TREE;
2218 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2220 /* Check for identities like (var OR (var != 0)) => true . */
2221 if (TREE_CODE (op2a) == SSA_NAME
2222 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) / (op2a == 1) tests op2a for truth ...  */
2224 if ((code2 == NE_EXPR && integer_zerop (op2b))
2225 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2227 true_test_var = op2a;
2228 if (var == true_test_var)
/* ... while (op2a == 0) / (op2a != 1) tests it for falsehood.  */
2231 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2232 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2234 false_test_var = op2a;
2235 if (var == false_test_var)
2236 return boolean_true_node;
2240 /* If the definition is a comparison, recurse on it. */
2241 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2243 tree t = or_comparisons_1 (innercode,
2244 gimple_assign_rhs1 (stmt),
2245 gimple_assign_rhs2 (stmt),
2253 /* If the definition is an AND or OR expression, we may be able to
2254    simplify by reassociating. */
2255 if (innercode == TRUTH_AND_EXPR
2256 || innercode == TRUTH_OR_EXPR
2257 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2258 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2260 tree inner1 = gimple_assign_rhs1 (stmt);
2261 tree inner2 = gimple_assign_rhs2 (stmt);
2264 tree partial = NULL_TREE;
2265 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2267 /* Check for boolean identities that don't require recursive examination
2269    inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2270    inner1 OR (inner1 AND inner2) => inner1
2271    !inner1 OR (inner1 OR inner2) => true
2272    !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2274 if (inner1 == true_test_var)
2275 return (is_or ? var : inner1);
2276 else if (inner2 == true_test_var)
2277 return (is_or ? var : inner2);
2278 else if (inner1 == false_test_var)
2281 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2282 else if (inner2 == false_test_var)
2285 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2287 /* Next, redistribute/reassociate the OR across the inner tests.
2288    Compute the first partial result, (inner1 OR (op2a code op2b)) */
2289 if (TREE_CODE (inner1) == SSA_NAME
2290 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2291 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2292 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2293 gimple_assign_rhs1 (s),
2294 gimple_assign_rhs2 (s),
2295 code2, op2a, op2b)))
2297 /* Handle the OR case, where we are reassociating:
2298    (inner1 OR inner2) OR (op2a code2 op2b)
2300    If the partial result t is a constant, we win. Otherwise
2301    continue on to try reassociating with the other inner test. */
2302 if (innercode == TRUTH_OR_EXPR)
2304 if (integer_onep (t))
2305 return boolean_true_node;
2306 else if (integer_zerop (t))
2310 /* Handle the AND case, where we are redistributing:
2311    (inner1 AND inner2) OR (op2a code2 op2b)
2312    => (t AND (inner2 OR (op2a code op2b))) */
2315 if (integer_zerop (t))
2316 return boolean_false_node;
2318 /* Save partial result for later. */
2323 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2324 if (TREE_CODE (inner2) == SSA_NAME
2325 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2326 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2327 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2328 gimple_assign_rhs1 (s),
2329 gimple_assign_rhs2 (s),
2330 code2, op2a, op2b)))
2332 /* Handle the OR case, where we are reassociating:
2333    (inner1 OR inner2) OR (op2a code2 op2b)
2335 if (innercode == TRUTH_OR_EXPR)
2337 if (integer_zerop (t))
2339 else if (integer_onep (t))
2340 return boolean_true_node;
2343 /* Handle the AND case, where we are redistributing:
2344    (inner1 AND inner2) OR (op2a code2 op2b)
2345    => (t AND (inner1 OR (op2a code2 op2b)))
2346    => (t AND partial) */
2349 if (integer_zerop (t))
2350 return boolean_false_node;
2353 /* We already got a simplification for the other
2354    operand to the redistributed AND expression. The
2355    interesting case is when at least one is true.
2356    Or, if both are the same, we can apply the identity
2357    (x AND x) == true. */
2358 if (integer_onep (partial))
2360 else if (integer_onep (t))
2362 else if (same_bool_result_p (t, partial))
2363 return boolean_true_node;
2371 /* Try to simplify the OR of two comparisons defined by
2372 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2373 If this can be done without constructing an intermediate value,
2374 return the resulting tree; otherwise NULL_TREE is returned.
2375 This function is deliberately asymmetric as it recurses on SSA_DEFs
2376 in the first comparison but not the second. */
/* NOTE(review): this chunk is an elided extraction -- the leading integers
   on each line are the original file's line numbers, and several statements
   (braces, switch headers, declarations of CMP helpers like `bool done` and
   `bool val`) fall in elided lines.  Only comments are touched here; all
   code tokens are left byte-identical.  */
2379 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2380 enum tree_code code2, tree op2a, tree op2b)
2382 /* First check for ((x CODE1 y) OR (x CODE2 y)): both comparisons are on
   the same operands in the same order, so fold the two codes directly.  */
2383 if (operand_equal_p (op1a, op2a, 0)
2384 && operand_equal_p (op1b, op2b, 0))
2386 tree t = combine_comparisons (UNKNOWN_LOCATION,
2387 TRUTH_ORIF_EXPR, code1, code2,
2388 boolean_type_node, op1a, op1b);
2393 /* Likewise the swapped case of the above: ((x CODE1 y) OR (y CODE2 x)).
   Swapping CODE2 normalizes the second comparison to (x ... y).  */
2394 if (operand_equal_p (op1a, op2b, 0)
2395 && operand_equal_p (op1b, op2a, 0))
2397 tree t = combine_comparisons (UNKNOWN_LOCATION,
2398 TRUTH_ORIF_EXPR, code1,
2399 swap_tree_comparison (code2),
2400 boolean_type_node, op1a, op1b);
2405 /* If both comparisons are of the same value against constants, we might
2406 be able to merge them.  CMP below is the three-way ordering of the two
   constants: sign of (op1b - op2b).  */
2407 if (operand_equal_p (op1a, op2a, 0)
2408 && TREE_CODE (op1b) == INTEGER_CST
2409 && TREE_CODE (op2b) == INTEGER_CST)
2411 int cmp = tree_int_cst_compare (op1b, op2b);
2413 /* If we have (op1a != op1b), we should either be able to
2414 return that or TRUE, depending on whether the constant op1b
2415 also satisfies the other comparison against op2b. */
2416 if (code1 == NE_EXPR)
/* VAL = whether the excluded value op1b satisfies (op1b CODE2 op2b);
   if so, the OR covers every value and is always true.
   NOTE(review): the switch header and VAL/DONE declarations are in
   elided lines -- confirm against the full file.  */
2422 case EQ_EXPR: val = (cmp == 0); break;
2423 case NE_EXPR: val = (cmp != 0); break;
2424 case LT_EXPR: val = (cmp < 0); break;
2425 case GT_EXPR: val = (cmp > 0); break;
2426 case LE_EXPR: val = (cmp <= 0); break;
2427 case GE_EXPR: val = (cmp >= 0); break;
2428 default: done = false;
2433 return boolean_true_node;
2435 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2438 /* Likewise if the second comparison is a != comparison. */
2439 else if (code2 == NE_EXPR)
/* Here VAL = whether op2b satisfies (op2b CODE1 op1b).  Since CMP
   compares op1b against op2b, the LT/GT/LE/GE senses are flipped
   relative to the branch above.  */
2445 case EQ_EXPR: val = (cmp == 0); break;
2446 case NE_EXPR: val = (cmp != 0); break;
2447 case LT_EXPR: val = (cmp > 0); break;
2448 case GT_EXPR: val = (cmp < 0); break;
2449 case LE_EXPR: val = (cmp >= 0); break;
2450 case GE_EXPR: val = (cmp <= 0); break;
2451 default: done = false;
2456 return boolean_true_node;
2458 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2462 /* See if an equality test is redundant with the other comparison:
   if (op1a == op1b) is already implied by (op1a CODE2 op2b), the OR
   reduces to the second comparison alone.  */
2463 else if (code1 == EQ_EXPR)
2468 case EQ_EXPR: val = (cmp == 0); break;
2469 case NE_EXPR: val = (cmp != 0); break;
2470 case LT_EXPR: val = (cmp < 0); break;
2471 case GT_EXPR: val = (cmp > 0); break;
2472 case LE_EXPR: val = (cmp <= 0); break;
2473 case GE_EXPR: val = (cmp >= 0); break;
2478 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2480 else if (code2 == EQ_EXPR)
/* Mirror of the CODE1 == EQ_EXPR case; CMP senses flipped as above.  */
2485 case EQ_EXPR: val = (cmp == 0); break;
2486 case NE_EXPR: val = (cmp != 0); break;
2487 case LT_EXPR: val = (cmp > 0); break;
2488 case GT_EXPR: val = (cmp < 0); break;
2489 case LE_EXPR: val = (cmp >= 0); break;
2490 case GE_EXPR: val = (cmp <= 0); break;
2495 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2498 /* Choose the less restrictive of two < or <= comparisons: for an OR,
   the weaker (larger-range) bound subsumes the stronger one.  */
2499 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2500 && (code2 == LT_EXPR || code2 == LE_EXPR))
2502 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2503 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2505 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2508 /* Likewise choose the less restrictive of two > or >= comparisons. */
2509 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2510 && (code2 == GT_EXPR || code2 == GE_EXPR))
2512 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2513 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2515 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2518 /* Check for singleton ranges: (x < C) OR (x > C) excludes exactly C,
   i.e. (x != C).  NOTE(review): the `else if (cmp == 0` guard line is
   elided here.  */
2520 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2521 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2522 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2524 /* Check for less/greater pairs that don't restrict the range at all:
   e.g. (x <= C1) OR (x >= C2) with C1 >= C2 covers every value.  */
2526 && (code1 == LT_EXPR || code1 == LE_EXPR)
2527 && (code2 == GT_EXPR || code2 == GE_EXPR))
2528 return boolean_true_node;
2530 && (code1 == GT_EXPR || code1 == GE_EXPR)
2531 && (code2 == LT_EXPR || code2 == LE_EXPR))
2532 return boolean_true_node;
2535 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2536 NAME's definition is a truth value. See if there are any simplifications
2537 that can be done against the NAME's definition. */
2538 if (TREE_CODE (op1a) == SSA_NAME
2539 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2540 && (integer_zerop (op1b) || integer_onep (op1b)))
/* INVERT: true when the first comparison tests NAME for falsehood
   ((NAME == 0) or (NAME != 1)), so NAME's definition must be negated
   before being ORed with the second comparison.  */
2542 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2543 || (code1 == NE_EXPR && integer_onep (op1b)));
2544 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2545 switch (gimple_code (stmt))
2548 /* Try to simplify by copy-propagating the definition. */
2549 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2552 /* If every argument to the PHI produces the same result when
2553 ORed with the second comparison, we win.
2554 Do not do this unless the type is bool since we need a bool
2555 result here anyway. */
2556 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2558 tree result = NULL_TREE;
2560 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2562 tree arg = gimple_phi_arg_def (stmt, i);
2564 /* If this PHI has itself as an argument, ignore it.
2565 If all the other args produce the same result,
   return that result.  */
2567 if (arg == gimple_phi_result (stmt))
2569 else if (TREE_CODE (arg) == INTEGER_CST)
/* Constant PHI argument: if it already makes the first comparison
   true, this argument's OR result is TRUE; otherwise the result is
   just the second comparison.  */
2571 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2574 result = boolean_true_node;
2575 else if (!integer_onep (result))
2579 result = fold_build2 (code2, boolean_type_node,
2581 else if (!same_bool_comparison_p (result,
2585 else if (TREE_CODE (arg) == SSA_NAME)
/* SSA PHI argument: recurse on its definition; every argument must
   yield the same simplified result or we give up.  */
2587 tree temp = or_var_with_comparison (arg, invert,
2593 else if (!same_bool_result_p (result, temp))
2609 /* Try to simplify the OR of two comparisons, specified by
2610 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2611 If this can be simplified to a single expression (without requiring
2612 introducing more SSA variables to hold intermediate values),
2613 return the resulting tree. Otherwise return NULL_TREE.
2614 If the result expression is non-null, it has boolean type. */
2617 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2618 enum tree_code code2, tree op2a, tree op2b)
/* or_comparisons_1 is deliberately asymmetric -- it only follows SSA
   definitions reached through its *first* comparison -- so try both
   argument orders before giving up.
   NOTE(review): the lines between the two calls (presumably
   "if (t) return t;") are elided in this chunk -- confirm against the
   full file.  */
2620 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2624 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);