1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
35 /* If SYM is a constant variable with known value, return the value.
36 NULL_TREE is returned otherwise. */
39 get_symbol_constant_value (tree sym)
42 && (TREE_READONLY (sym)
43 || TREE_CODE (sym) == CONST_DECL))
45 tree val = DECL_INITIAL (sym);
/* Only a GIMPLE minimal invariant can be substituted for the symbol
   in a GIMPLE statement; anything else must be left alone.  */
49 if (is_gimple_min_invariant (val))
51 if (TREE_CODE (val) == ADDR_EXPR)
53 tree base = get_base_address (TREE_OPERAND (val, 0));
54 if (base && TREE_CODE (base) == VAR_DECL)
/* Propagating the ADDR_EXPR exposes BASE's address in the IL, so
   mark it addressable and record it as referenced.  */
56 TREE_ADDRESSABLE (base) = 1;
57 if (gimple_referenced_vars (cfun))
58 add_referenced_var (base);
64 /* Variables declared 'const' without an initializer
65 have zero as the initializer if they may not be
66 overridden at link or run time. */
/* NOTE(review): the implicit zero is applied only to integral and
   scalar floating-point types that bind locally; aggregates and
   externally overridable symbols are deliberately excluded.  */
68 && !DECL_EXTERNAL (sym)
69 && targetm.binds_local_p (sym)
70 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
71 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
72 return fold_convert (TREE_TYPE (sym), integer_zero_node);
79 /* Return true if we may propagate the address expression ADDR into the
80 dereference DEREF and cancel them. */
83 may_propagate_address_into_dereference (tree addr, tree deref)
85 gcc_assert (TREE_CODE (deref) == MEM_REF
86 && TREE_CODE (addr) == ADDR_EXPR);
88 /* Don't propagate if ADDR's operand has incomplete type. */
89 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
92 /* If the address is invariant then we do not need to preserve restrict
93 qualifications. But we do need to preserve volatile qualifiers until
94 we can annotate the folded dereference itself properly. */
95 if (is_gimple_min_invariant (addr)
96 && (!TREE_THIS_VOLATILE (deref)
97 || TYPE_VOLATILE (TREE_TYPE (addr))))
98 return useless_type_conversion_p (TREE_TYPE (deref),
99 TREE_TYPE (TREE_OPERAND (addr, 0)));
/* For a non-invariant address both the pointer substitution and the
   folded access must be useless type conversions, i.e. neither step
   may change semantics of the access.  */
101 /* Else both the address substitution and the folding must result in
102 a valid useless type conversion sequence. */
103 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
105 && useless_type_conversion_p (TREE_TYPE (deref),
106 TREE_TYPE (TREE_OPERAND (addr, 0))));
110 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
111 BASE is an array type. OFFSET is a byte displacement.
113 LOC is the location of the original expression. */
116 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
118 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
119 tree array_type, elt_type, elt_size;
122 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
123 measured in units of the size of elements type) from that ARRAY_REF).
124 We can't do anything if either is variable.
126 The case we handle here is *(&A[N]+O). */
127 if (TREE_CODE (base) == ARRAY_REF)
129 tree low_bound = array_ref_low_bound (base);
131 elt_offset = TREE_OPERAND (base, 1);
132 if (TREE_CODE (low_bound) != INTEGER_CST
133 || TREE_CODE (elt_offset) != INTEGER_CST)
/* Normalize the existing index to a zero-based element offset.  */
136 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
137 base = TREE_OPERAND (base, 0);
140 /* Ignore stupid user tricks of indexing non-array variables. */
141 array_type = TREE_TYPE (base);
142 if (TREE_CODE (array_type) != ARRAY_TYPE)
144 elt_type = TREE_TYPE (array_type);
146 /* Use signed size type for intermediate computation on the index. */
147 idx_type = ssizetype;
149 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
150 element type (so we can use the alignment if it's not constant).
151 Otherwise, compute the offset as an index by using a division. If the
152 division isn't exact, then don't do anything. */
153 elt_size = TYPE_SIZE_UNIT (elt_type);
156 if (integer_zerop (offset))
158 if (TREE_CODE (elt_size) != INTEGER_CST)
159 elt_size = size_int (TYPE_ALIGN (elt_type));
161 idx = build_int_cst (idx_type, 0)ash;
165 unsigned HOST_WIDE_INT lquo, lrem;
166 HOST_WIDE_INT hquo, hrem;
169 /* The final array offset should be signed, so we need
170 to sign-extend the (possibly pointer) offset here
171 and use signed division. */
172 soffset = double_int_sext (tree_to_double_int (offset),
173 TYPE_PRECISION (TREE_TYPE (offset)));
/* Divide the byte offset by the element size with double-int
   precision; presumably a nonzero remainder aborts the fold —
   TODO confirm against the elided remainder check.  */
174 if (TREE_CODE (elt_size) != INTEGER_CST
175 || div_and_round_double (TRUNC_DIV_EXPR, 0,
176 soffset.low, soffset.high,
177 TREE_INT_CST_LOW (elt_size),
178 TREE_INT_CST_HIGH (elt_size),
179 &lquo, &hquo, &lrem, &hrem)
183 idx = build_int_cst_wide (idx_type, lquo, hquo);
186 /* Assume the low bound is zero. If there is a domain type, get the
187 low bound, if any, convert the index into that type, and add the
189 min_idx = build_int_cst (idx_type, 0);
190 domain_type = TYPE_DOMAIN (array_type);
193 idx_type = domain_type;
194 if (TYPE_MIN_VALUE (idx_type))
195 min_idx = TYPE_MIN_VALUE (idx_type);
197 min_idx = fold_convert (idx_type, min_idx);
199 if (TREE_CODE (min_idx) != INTEGER_CST)
202 elt_offset = fold_convert (idx_type, elt_offset);
205 if (!integer_zerop (min_idx))
206 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
207 if (!integer_zerop (elt_offset))
208 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
210 /* Make sure to possibly truncate late after offsetting. */
211 idx = fold_convert (idx_type, idx);
213 /* We don't want to construct access past array bounds. For example
216 should not be simplified into (*c)[14] or tree-vrp will
218 This is only an issue for multi-dimensional arrays. */
/* Reject indices outside the domain of the inner array so we do not
   manufacture out-of-bounds ARRAY_REFs that later passes (VRP) would
   treat as undefined.  */
219 if (TREE_CODE (elt_type) == ARRAY_TYPE
222 if (TYPE_MAX_VALUE (domain_type)
223 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
224 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
226 else if (TYPE_MIN_VALUE (domain_type)
227 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
228 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
230 else if (compare_tree_int (idx, 0) < 0)
235 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
236 SET_EXPR_LOCATION (t, loc);
242 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
243 LOC is the location of original expression.
245 Before attempting the conversion strip off existing ADDR_EXPRs. */
248 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
254 if (TREE_CODE (base) != ADDR_EXPR)
257 base = TREE_OPERAND (base, 0);
258 if (types_compatible_p (orig_type, TREE_TYPE (base))
259 && integer_zerop (offset))
/* Otherwise try to turn the byte OFFSET into an array index; the
   result is only usable if its type matches ORIG_TYPE.  */
262 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
263 if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
268 /* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
269 LOC is the location of the original expression. */
272 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
278 if (TREE_CODE (addr) != ADDR_EXPR)
280 base = TREE_OPERAND (addr, 0);
281 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
/* Re-take the address of the folded array reference; the final
   expression must convert to ORIG_TYPE without any real conversion.  */
284 ret = build_fold_addr_expr (ret);
285 if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
287 SET_EXPR_LOCATION (ret, loc);
294 /* A quaint feature extant in our address arithmetic is that there
295 can be hidden type changes here. The type of the result need
296 not be the same as the type of the input pointer.
298 What we're after here is an expression of the form
299 (T *)(&array + const)
300 where array is OP0, const is OP1, RES_TYPE is T and
301 the cast doesn't actually exist, but is implicit in the
302 type of the POINTER_PLUS_EXPR. We'd like to turn this into
304 which may be able to propagate further. */
307 maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
312 /* The first operand should be an ADDR_EXPR. */
313 if (TREE_CODE (op0) != ADDR_EXPR)
315 op0 = TREE_OPERAND (op0, 0);
317 /* It had better be a constant. */
318 if (TREE_CODE (op1) != INTEGER_CST)
320 /* Or op0 should now be A[0] and the non-constant offset defined
321 via a multiplication by the array element size. */
/* Case 1: &A[0] p+ (i * elsz) or, for one-byte elements, &A[0] p+ i
   where i is an SSA name -> &A[i].  */
322 if (TREE_CODE (op0) == ARRAY_REF
323 /* As we will end up creating a variable index array access
324 in the outermost array dimension make sure there isn't
325 a more inner array that the index could overflow to. */
326 && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
327 && integer_zerop (TREE_OPERAND (op0, 1))
328 && TREE_CODE (op1) == SSA_NAME)
330 gimple offset_def = SSA_NAME_DEF_STMT (op1);
331 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
332 if (!host_integerp (elsz, 1)
333 || !is_gimple_assign (offset_def))
336 /* Do not build array references of something that we can't
337 see the true number of array dimensions for. */
338 if (!DECL_P (TREE_OPERAND (op0, 0))
339 && !handled_component_p (TREE_OPERAND (op0, 0)))
342 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
343 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
344 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
345 return build_fold_addr_expr
346 (build4 (ARRAY_REF, TREE_TYPE (op0),
347 TREE_OPERAND (op0, 0),
348 gimple_assign_rhs1 (offset_def),
349 TREE_OPERAND (op0, 2),
350 TREE_OPERAND (op0, 3)));
351 else if (integer_onep (elsz)
352 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
353 return build_fold_addr_expr
354 (build4 (ARRAY_REF, TREE_TYPE (op0),
355 TREE_OPERAND (op0, 0),
357 TREE_OPERAND (op0, 2),
358 TREE_OPERAND (op0, 3)));
/* Case 2: the same transformation when OP0 is the array itself
   (one-dimensional only) rather than an ARRAY_REF of it.  */
360 else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
362 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
363 && TREE_CODE (op1) == SSA_NAME)
365 gimple offset_def = SSA_NAME_DEF_STMT (op1);
366 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
367 if (!host_integerp (elsz, 1)
368 || !is_gimple_assign (offset_def))
371 /* Do not build array references of something that we can't
372 see the true number of array dimensions for. */
374 && !handled_component_p (op0))
377 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
378 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
379 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
380 return build_fold_addr_expr
381 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
382 op0, gimple_assign_rhs1 (offset_def),
383 integer_zero_node, NULL_TREE));
384 else if (integer_onep (elsz)
385 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
386 return build_fold_addr_expr
387 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
389 integer_zero_node, NULL_TREE));
395 /* If the first operand is an ARRAY_REF, expand it so that we can fold
396 the offset into it. */
/* Peel constant-index ARRAY_REFs off OP0, accumulating their byte
   displacement into OP1, until a non-constant piece stops us.  */
397 while (TREE_CODE (op0) == ARRAY_REF)
399 tree array_obj = TREE_OPERAND (op0, 0);
400 tree array_idx = TREE_OPERAND (op0, 1);
401 tree elt_type = TREE_TYPE (op0);
402 tree elt_size = TYPE_SIZE_UNIT (elt_type);
405 if (TREE_CODE (array_idx) != INTEGER_CST)
407 if (TREE_CODE (elt_size) != INTEGER_CST)
410 /* Un-bias the index by the min index of the array type. */
411 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
414 min_idx = TYPE_MIN_VALUE (min_idx);
417 if (TREE_CODE (min_idx) != INTEGER_CST)
420 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
421 if (!integer_zerop (min_idx))
422 array_idx = int_const_binop (MINUS_EXPR, array_idx,
427 /* Convert the index to a byte offset. */
428 array_idx = fold_convert (sizetype, array_idx);
429 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
431 /* Update the operands for the next round, or for folding. */
432 op1 = int_const_binop (PLUS_EXPR,
437 ptd_type = TREE_TYPE (res_type);
438 /* If we want a pointer to void, reconstruct the reference from the
439 array element type. A pointer to that can be trivially converted
440 to void *. This happens as we fold (void *)(ptr p+ off). */
441 if (VOID_TYPE_P (ptd_type)
442 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
443 ptd_type = TREE_TYPE (TREE_TYPE (op0));
445 /* At which point we can try some of the same things as for indirects. */
446 t = maybe_fold_offset_to_array_ref (loc, op0, op1);
449 t = build_fold_addr_expr (t);
450 if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
452 SET_EXPR_LOCATION (t, loc);
458 /* Subroutine of fold_stmt. We perform several simplifications of the
459 memory reference tree EXPR and make sure to re-gimplify them properly
460 after propagation of constant addresses. IS_LHS is true if the
461 reference is supposed to be an lvalue. */
464 maybe_fold_reference (tree expr, bool is_lhs)
/* Reading an element of a constant string can be folded to the
   character constant directly.  */
468 if (TREE_CODE (expr) == ARRAY_REF
471 tree tem = fold_read_from_constant_string (expr);
476 /* ??? We might want to open-code the relevant remaining cases
477 to avoid using the generic fold. */
478 if (handled_component_p (*t)
479 && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
481 tree tem = fold (*t);
/* Walk down to the innermost reference, which is the one we may be
   able to simplify.  */
486 while (handled_component_p (*t))
487 t = &TREE_OPERAND (*t, 0);
489 /* Fold back MEM_REFs to reference trees. */
490 if (TREE_CODE (*t) == MEM_REF
491 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
492 && integer_zerop (TREE_OPERAND (*t, 1))
493 && (TREE_THIS_VOLATILE (*t)
494 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
495 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
496 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
497 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
498 /* We have to look out here to not drop a required conversion
499 from the rhs to the lhs if is_lhs, but we don't have the
500 rhs here to verify that. Thus require strict type
502 && types_compatible_p (TREE_TYPE (*t),
503 TREE_TYPE (TREE_OPERAND
504 (TREE_OPERAND (*t, 0), 0))))
/* After each successful rewrite, recurse: the simplified tree may
   enable further folding.  */
507 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
508 tem = maybe_fold_reference (expr, is_lhs);
513 /* Canonicalize MEM_REFs invariant address operand. */
514 else if (TREE_CODE (*t) == MEM_REF
515 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
516 && !DECL_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))
517 && !CONSTANT_CLASS_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
519 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
520 TREE_OPERAND (*t, 0),
521 TREE_OPERAND (*t, 1));
525 tem = maybe_fold_reference (expr, is_lhs);
531 else if (TREE_CODE (*t) == TARGET_MEM_REF)
533 tree tem = maybe_fold_tmr (*t);
537 tem = maybe_fold_reference (expr, is_lhs);
/* For rvalues, a constant symbol can be replaced by its value if the
   types convert without any real conversion.  */
546 tree tem = get_symbol_constant_value (*t);
548 && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
550 *t = unshare_expr (tem);
551 tem = maybe_fold_reference (expr, is_lhs);
562 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
563 replacement rhs for the statement or NULL_TREE if no simplification
564 could be made. It is assumed that the operands have been previously
568 fold_gimple_assign (gimple_stmt_iterator *si)
570 gimple stmt = gsi_stmt (*si);
571 enum tree_code subcode = gimple_assign_rhs_code (stmt);
572 location_t loc = gimple_location (stmt);
574 tree result = NULL_TREE;
/* Dispatch on the shape of the right-hand side.  */
576 switch (get_gimple_rhs_class (subcode))
578 case GIMPLE_SINGLE_RHS:
580 tree rhs = gimple_assign_rhs1 (stmt);
582 /* Try to fold a conditional expression. */
583 if (TREE_CODE (rhs) == COND_EXPR)
585 tree op0 = COND_EXPR_COND (rhs);
588 location_t cond_loc = EXPR_LOCATION (rhs);
590 if (COMPARISON_CLASS_P (op0))
/* Defer overflow warnings so we only warn if the folded
   condition is actually usable as a GIMPLE rhs.  */
592 fold_defer_overflow_warnings ();
593 tem = fold_binary_loc (cond_loc,
594 TREE_CODE (op0), TREE_TYPE (op0),
595 TREE_OPERAND (op0, 0),
596 TREE_OPERAND (op0, 1));
597 /* This is actually a conditional expression, not a GIMPLE
598 conditional statement, however, the valid_gimple_rhs_p
599 test still applies. */
600 set = (tem && is_gimple_condexpr (tem)
601 && valid_gimple_rhs_p (tem));
602 fold_undefer_overflow_warnings (set, stmt, 0);
604 else if (is_gimple_min_invariant (op0))
613 result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
614 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
617 else if (REFERENCE_CLASS_P (rhs))
618 return maybe_fold_reference (rhs, false);
/* For &REF first fold REF as an lvalue, then re-take its address,
   canonicalizing &*p back to p where the MEM_REF offset is zero.  */
620 else if (TREE_CODE (rhs) == ADDR_EXPR)
622 tree ref = TREE_OPERAND (rhs, 0);
623 tree tem = maybe_fold_reference (ref, true);
625 && TREE_CODE (tem) == MEM_REF
626 && integer_zerop (TREE_OPERAND (tem, 1)))
627 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
629 result = fold_convert (TREE_TYPE (rhs),
630 build_fold_addr_expr_loc (loc, tem));
631 else if (TREE_CODE (ref) == MEM_REF
632 && integer_zerop (TREE_OPERAND (ref, 1)))
633 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
/* A vector CONSTRUCTOR with a full complement of constant
   elements can become a VECTOR_CST.  */
636 else if (TREE_CODE (rhs) == CONSTRUCTOR
637 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
638 && (CONSTRUCTOR_NELTS (rhs)
639 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
641 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
645 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
646 if (TREE_CODE (val) != INTEGER_CST
647 && TREE_CODE (val) != REAL_CST
648 && TREE_CODE (val) != FIXED_CST)
651 return build_vector_from_ctor (TREE_TYPE (rhs),
652 CONSTRUCTOR_ELTS (rhs));
655 else if (DECL_P (rhs))
656 return unshare_expr (get_symbol_constant_value (rhs));
658 /* If we couldn't fold the RHS, hand over to the generic
660 if (result == NULL_TREE)
663 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
664 that may have been added by fold, and "useless" type
665 conversions that might now be apparent due to propagation. */
666 STRIP_USELESS_TYPE_CONVERSION (result);
668 if (result != rhs && valid_gimple_rhs_p (result))
675 case GIMPLE_UNARY_RHS:
677 tree rhs = gimple_assign_rhs1 (stmt);
679 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
682 /* If the operation was a conversion do _not_ mark a
683 resulting constant with TREE_OVERFLOW if the original
684 constant was not. These conversions have implementation
685 defined behavior and retaining the TREE_OVERFLOW flag
686 here would confuse later passes such as VRP. */
687 if (CONVERT_EXPR_CODE_P (subcode)
688 && TREE_CODE (result) == INTEGER_CST
689 && TREE_CODE (rhs) == INTEGER_CST)
690 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
692 STRIP_USELESS_TYPE_CONVERSION (result);
693 if (valid_gimple_rhs_p (result))
/* A pointer-to-pointer conversion may still fold as an address
   computation with a zero offset.  */
696 else if (CONVERT_EXPR_CODE_P (subcode)
697 && POINTER_TYPE_P (gimple_expr_type (stmt))
698 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
700 tree type = gimple_expr_type (stmt);
701 tree t = maybe_fold_offset_to_address (loc,
702 gimple_assign_rhs1 (stmt),
703 integer_zero_node, type);
710 case GIMPLE_BINARY_RHS:
711 /* Try to fold pointer addition. */
712 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
714 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
715 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
717 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
718 if (!useless_type_conversion_p
719 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
720 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
722 result = maybe_fold_stmt_addition (gimple_location (stmt),
724 gimple_assign_rhs1 (stmt),
725 gimple_assign_rhs2 (stmt));
729 result = fold_binary_loc (loc, subcode,
730 TREE_TYPE (gimple_assign_lhs (stmt)),
731 gimple_assign_rhs1 (stmt),
732 gimple_assign_rhs2 (stmt));
736 STRIP_USELESS_TYPE_CONVERSION (result);
737 if (valid_gimple_rhs_p (result))
740 /* Fold might have produced non-GIMPLE, so if we trust it blindly
741 we lose canonicalization opportunities. Do not go again
742 through fold here though, or the same non-GIMPLE will be
744 if (commutative_tree_code (subcode)
745 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
746 gimple_assign_rhs2 (stmt), false))
747 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
748 gimple_assign_rhs2 (stmt),
749 gimple_assign_rhs1 (stmt));
753 case GIMPLE_TERNARY_RHS:
754 result = fold_ternary_loc (loc, subcode,
755 TREE_TYPE (gimple_assign_lhs (stmt)),
756 gimple_assign_rhs1 (stmt),
757 gimple_assign_rhs2 (stmt),
758 gimple_assign_rhs3 (stmt));
762 STRIP_USELESS_TYPE_CONVERSION (result);
763 if (valid_gimple_rhs_p (result))
766 /* Fold might have produced non-GIMPLE, so if we trust it blindly
767 we lose canonicalization opportunities. Do not go again
768 through fold here though, or the same non-GIMPLE will be
770 if (commutative_ternary_tree_code (subcode)
771 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
772 gimple_assign_rhs2 (stmt), false))
773 return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
774 gimple_assign_rhs2 (stmt),
775 gimple_assign_rhs1 (stmt),
776 gimple_assign_rhs3 (stmt));
780 case GIMPLE_INVALID_RHS:
787 /* Attempt to fold a conditional statement. Return true if any changes were
788 made. We only attempt to fold the condition expression, and do not perform
789 any transformation that would require alteration of the cfg. It is
790 assumed that the operands have been previously folded. */
793 fold_gimple_cond (gimple stmt)
795 tree result = fold_binary_loc (gimple_location (stmt),
796 gimple_cond_code (stmt),
798 gimple_cond_lhs (stmt),
799 gimple_cond_rhs (stmt));
/* Only install the folded form if it is still a valid GIMPLE
   condition expression.  */
803 STRIP_USELESS_TYPE_CONVERSION (result);
804 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
806 gimple_cond_set_condition_from_tree (stmt, result);
814 /* Convert EXPR into a GIMPLE value suitable for substitution on the
815 RHS of an assignment. Insert the necessary statements before
816 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
817 is replaced. If the call is expected to produces a result, then it
818 is replaced by an assignment of the new RHS to the result variable.
819 If the result is to be ignored, then the call is replaced by a
820 GIMPLE_NOP. A proper VDEF chain is retained by making the first
821 VUSE and the last VDEF of the whole sequence be the same as the replaced
822 statement and using new SSA names for stores in between. */
825 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
828 tree tmp = NULL_TREE; /* Silence warning. */
829 gimple stmt, new_stmt;
830 gimple_stmt_iterator i;
831 gimple_seq stmts = gimple_seq_alloc();
832 struct gimplify_ctx gctx;
834 gimple laststore = NULL;
837 stmt = gsi_stmt (*si_p);
839 gcc_assert (is_gimple_call (stmt));
841 lhs = gimple_call_lhs (stmt);
/* The replaced call's VUSE is the virtual operand reaching the start
   of the new sequence.  */
842 reaching_vuse = gimple_vuse (stmt);
844 push_gimplify_context (&gctx);
846 if (lhs == NULL_TREE)
847 gimplify_and_add (expr, &stmts);
849 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
851 pop_gimplify_context (NULL);
853 if (gimple_has_location (stmt))
854 annotate_all_with_location (stmts, gimple_location (stmt));
856 /* The replacement can expose previously unreferenced variables. */
857 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
861 gsi_insert_before (si_p, last, GSI_NEW_STMT);
864 new_stmt = gsi_stmt (i);
865 if (gimple_in_ssa_p (cfun))
867 find_new_referenced_vars (new_stmt);
868 mark_symbols_for_renaming (new_stmt);
870 /* If the new statement has a VUSE, update it with exact SSA name we
871 know will reach this one. */
872 if (gimple_vuse (new_stmt))
874 /* If we've also seen a previous store create a new VDEF for
875 the latter one, and make that the new reaching VUSE. */
878 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
879 gimple_set_vdef (laststore, reaching_vuse);
880 update_stmt (laststore);
883 gimple_set_vuse (new_stmt, reaching_vuse);
884 gimple_set_modified (new_stmt, true);
/* Remember the last store in the sequence; it will get the final
   VDEF once the whole sequence is in place.  */
886 if (gimple_assign_single_p (new_stmt)
887 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
889 laststore = new_stmt;
894 if (lhs == NULL_TREE)
896 /* If we replace a call without LHS that has a VDEF and our new
897 sequence ends with a store we must make that store have the same
898 vdef in order not to break the sequencing. This can happen
899 for instance when folding memcpy calls into assignments. */
900 if (gimple_vdef (stmt) && laststore)
902 gimple_set_vdef (laststore, gimple_vdef (stmt));
903 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
904 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
905 update_stmt (laststore);
907 else if (gimple_in_ssa_p (cfun))
909 unlink_stmt_vdef (stmt);
918 gsi_insert_before (si_p, last, GSI_NEW_STMT);
/* With an LHS, transfer the replaced call's VDEF either to the last
   store or to the final assignment, depending on which defines
   memory.  */
921 if (laststore && is_gimple_reg (lhs))
923 gimple_set_vdef (laststore, gimple_vdef (stmt));
924 update_stmt (laststore);
925 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
926 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
931 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
932 gimple_set_vdef (laststore, reaching_vuse);
933 update_stmt (laststore);
936 new_stmt = gimple_build_assign (lhs, tmp);
937 if (!is_gimple_reg (tmp))
938 gimple_set_vuse (new_stmt, reaching_vuse);
939 if (!is_gimple_reg (lhs))
941 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
942 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
943 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
947 gimple_set_location (new_stmt, gimple_location (stmt));
948 gsi_replace (si_p, new_stmt, false);
951 /* Return the string length, maximum string length or maximum value of
953 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
954 is not NULL and, for TYPE == 0, its value is not equal to the length
955 we determine or if we are unable to determine the length or value,
956 return false. VISITED is a bitmap of visited variables.
957 TYPE is 0 if string length should be returned, 1 for maximum string
958 length and 2 for maximum value ARG can have. */
961 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
/* Non-SSA arguments: recurse into both arms of a COND_EXPR, strip a
   &(*p)[0] wrapper, or fall through to c_strlen.  */
966 if (TREE_CODE (arg) != SSA_NAME)
968 if (TREE_CODE (arg) == COND_EXPR)
969 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
970 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
971 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
972 else if (TREE_CODE (arg) == ADDR_EXPR
973 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
974 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
976 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
977 if (TREE_CODE (aop0) == INDIRECT_REF
978 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
979 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
980 length, visited, type);
986 if (TREE_CODE (val) != INTEGER_CST
987 || tree_int_cst_sgn (val) < 0)
991 val = c_strlen (arg, 1);
/* Merge VAL into *LENGTH: for the maximum variants keep the larger
   value; for the exact length the values must agree.  */
999 if (TREE_CODE (*length) != INTEGER_CST
1000 || TREE_CODE (val) != INTEGER_CST)
1003 if (tree_int_cst_lt (*length, val))
1007 else if (simple_cst_equal (val, *length) != 1)
1015 /* If we were already here, break the infinite cycle. */
1016 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1020 def_stmt = SSA_NAME_DEF_STMT (var);
1022 switch (gimple_code (def_stmt))
1025 /* The RHS of the statement defining VAR must either have a
1026 constant length or come from another SSA_NAME with a constant
1028 if (gimple_assign_single_p (def_stmt)
1029 || gimple_assign_unary_nop_p (def_stmt))
1031 tree rhs = gimple_assign_rhs1 (def_stmt);
1032 return get_maxval_strlen (rhs, length, visited, type);
1038 /* All the arguments of the PHI node must have the same constant
1042 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1044 tree arg = gimple_phi_arg (def_stmt, i)->def;
1046 /* If this PHI has itself as an argument, we cannot
1047 determine the string length of this argument. However,
1048 if we can find a constant string length for the other
1049 PHI args then we can still be sure that this is a
1050 constant string length. So be optimistic and just
1051 continue with the next argument. */
1052 if (arg == gimple_phi_result (def_stmt))
1055 if (!get_maxval_strlen (arg, length, visited, type))
1067 /* Fold builtin call in statement STMT. Returns a simplified tree.
1068 We may return a non-constant expression, including another call
1069 to a different function and with different arguments, e.g.,
1070 substituting memcpy for strcpy when the string length is known.
1071 Note that some builtins expand into inline code that may not
1072 be valid in GIMPLE. Callers must take care. */
1075 gimple_fold_builtin (gimple stmt)
1077 tree result, val[3];
1083 location_t loc = gimple_location (stmt);
1085 gcc_assert (is_gimple_call (stmt));
1087 ignore = (gimple_call_lhs (stmt) == NULL);
1089 /* First try the generic builtin folder. If that succeeds, return the
1091 result = fold_call_stmt (stmt, ignore);
1095 STRIP_NOPS (result);
1099 /* Ignore MD builtins. */
1100 callee = gimple_call_fndecl (stmt);
1101 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1104 /* If the builtin could not be folded, and it has no argument list,
1106 nargs = gimple_call_num_args (stmt);
1110 /* Limit the work only for builtins we know how to simplify. */
/* The first switch only selects which argument index carries the
   string whose length we need (arg_idx) and which strlen flavor to
   compute (type); the actual folding happens in the second switch.  */
1111 switch (DECL_FUNCTION_CODE (callee))
1113 case BUILT_IN_STRLEN:
1114 case BUILT_IN_FPUTS:
1115 case BUILT_IN_FPUTS_UNLOCKED:
1119 case BUILT_IN_STRCPY:
1120 case BUILT_IN_STRNCPY:
1124 case BUILT_IN_MEMCPY_CHK:
1125 case BUILT_IN_MEMPCPY_CHK:
1126 case BUILT_IN_MEMMOVE_CHK:
1127 case BUILT_IN_MEMSET_CHK:
1128 case BUILT_IN_STRNCPY_CHK:
1132 case BUILT_IN_STRCPY_CHK:
1133 case BUILT_IN_STPCPY_CHK:
1137 case BUILT_IN_SNPRINTF_CHK:
1138 case BUILT_IN_VSNPRINTF_CHK:
1146 if (arg_idx >= nargs)
1149 /* Try to use the dataflow information gathered by the CCP process. */
1150 visited = BITMAP_ALLOC (NULL);
1151 bitmap_clear (visited);
1153 memset (val, 0, sizeof (val));
1154 a = gimple_call_arg (stmt, arg_idx);
1155 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
1156 val[arg_idx] = NULL_TREE;
1158 BITMAP_FREE (visited);
/* Second switch: perform the per-builtin folding using the length
   information collected above.  Each case also re-checks nargs
   against the builtin's expected signature.  */
1161 switch (DECL_FUNCTION_CODE (callee))
1163 case BUILT_IN_STRLEN:
1164 if (val[0] && nargs == 1)
1167 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
1169 /* If the result is not a valid gimple value, or not a cast
1170 of a valid gimple value, then we cannot use the result. */
1171 if (is_gimple_val (new_val)
1172 || (is_gimple_cast (new_val)
1173 && is_gimple_val (TREE_OPERAND (new_val, 0))))
1178 case BUILT_IN_STRCPY:
1179 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
1180 result = fold_builtin_strcpy (loc, callee,
1181 gimple_call_arg (stmt, 0),
1182 gimple_call_arg (stmt, 1),
1186 case BUILT_IN_STRNCPY:
1187 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1188 result = fold_builtin_strncpy (loc, callee,
1189 gimple_call_arg (stmt, 0),
1190 gimple_call_arg (stmt, 1),
1191 gimple_call_arg (stmt, 2),
1195 case BUILT_IN_FPUTS:
1197 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1198 gimple_call_arg (stmt, 1),
1199 ignore, false, val[0]);
1202 case BUILT_IN_FPUTS_UNLOCKED:
1204 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1205 gimple_call_arg (stmt, 1),
1206 ignore, true, val[0]);
1209 case BUILT_IN_MEMCPY_CHK:
1210 case BUILT_IN_MEMPCPY_CHK:
1211 case BUILT_IN_MEMMOVE_CHK:
1212 case BUILT_IN_MEMSET_CHK:
1213 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1214 result = fold_builtin_memory_chk (loc, callee,
1215 gimple_call_arg (stmt, 0),
1216 gimple_call_arg (stmt, 1),
1217 gimple_call_arg (stmt, 2),
1218 gimple_call_arg (stmt, 3),
1220 DECL_FUNCTION_CODE (callee));
1223 case BUILT_IN_STRCPY_CHK:
1224 case BUILT_IN_STPCPY_CHK:
1225 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1226 result = fold_builtin_stxcpy_chk (loc, callee,
1227 gimple_call_arg (stmt, 0),
1228 gimple_call_arg (stmt, 1),
1229 gimple_call_arg (stmt, 2),
1231 DECL_FUNCTION_CODE (callee));
1234 case BUILT_IN_STRNCPY_CHK:
1235 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1236 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
1237 gimple_call_arg (stmt, 1),
1238 gimple_call_arg (stmt, 2),
1239 gimple_call_arg (stmt, 3),
1243 case BUILT_IN_SNPRINTF_CHK:
1244 case BUILT_IN_VSNPRINTF_CHK:
1245 if (val[1] && is_gimple_val (val[1]))
1246 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1247 DECL_FUNCTION_CODE (callee));
/* When the call's value is unused, strip away any computed result
   wrapper so only the side effects remain.  */
1254 if (result && ignore)
1255 result = fold_ignored_result (result);
1259 /* Return the first of the base binfos of BINFO that has virtual functions. */
1262 get_first_base_binfo_with_virtuals (tree binfo)
/* Scan the direct bases in declaration order; the first base whose binfo
   carries a non-empty BINFO_VIRTUALS list is the one relevant for
   virtual-call lookup.  (Loop variables and the fallthrough return are on
   lines elided from this listing.)  */
1267 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1268 if (BINFO_VIRTUALS (base_binfo))
1275 /* Search for a base binfo of BINFO that corresponds to TYPE and return it if
1276 it is found or NULL_TREE if it is not. */
1279 get_base_binfo_for_type (tree binfo, tree type)
1283 tree res = NULL_TREE;
/* Linear scan of the direct bases; TYPE is matched by pointer identity,
   which is valid for the main variant types binfos refer to.  */
1285 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1286 if (TREE_TYPE (base_binfo) == type)
1295 /* Return a binfo describing the part of object referenced by expression REF.
1296 Return NULL_TREE if it cannot be determined. REF can consist of a series of
1297 COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just
1298 a simple declaration, indirect reference or an SSA_NAME. If the function
1299 discovers an INDIRECT_REF or an SSA_NAME, it will assume that the
1300 encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
1301 Otherwise the first non-artificial field declaration or the base declaration
1302 will be examined to get the encapsulating type. */
1305 gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
/* Peel COMPONENT_REFs from the outside in.  (The enclosing loop header is
   elided from this listing; L1343 strips one level per iteration.)  */
1309 if (TREE_CODE (ref) == COMPONENT_REF)
1312 tree binfo, base_binfo;
1313 tree field = TREE_OPERAND (ref, 1);
/* A user-written (non-artificial) field of record type directly gives the
   binfo of the referenced subobject.  */
1315 if (!DECL_ARTIFICIAL (field))
1317 tree type = TREE_TYPE (field);
1318 if (TREE_CODE (type) == RECORD_TYPE)
1319 return TYPE_BINFO (type);
/* Artificial field: this is a base-class subobject access.  Map the field
   back to a base binfo of the parent type.  */
1324 par_type = TREE_TYPE (TREE_OPERAND (ref, 0));
1325 binfo = TYPE_BINFO (par_type);
1327 || BINFO_N_BASE_BINFOS (binfo) == 0)
1330 base_binfo = get_first_base_binfo_with_virtuals (binfo);
1331 if (base_binfo && BINFO_TYPE (base_binfo) != TREE_TYPE (field))
/* Recurse on the containing object, then descend into the base binfo
   that matches the field's type.  */
1335 d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0),
1337 /* Get descendant binfo. */
1340 return get_base_binfo_for_type (d_binfo, TREE_TYPE (field));
1343 ref = TREE_OPERAND (ref, 0);
1345 else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE)
1346 return TYPE_BINFO (TREE_TYPE (ref));
/* For SSA names and memory references we cannot see the object's type;
   fall back to the caller-supplied KNOWN_BINFO, if any.  */
1347 else if (known_binfo
1348 && (TREE_CODE (ref) == SSA_NAME
1349 || TREE_CODE (ref) == MEM_REF))
1356 /* Fold a OBJ_TYPE_REF expression to the address of a function. TOKEN is
1357 integer form of OBJ_TYPE_REF_TOKEN of the reference expression. KNOWN_BINFO
1358 carries the binfo describing the true type of OBJ_TYPE_REF_OBJECT(REF). */
1361 gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo)
1365 struct cgraph_node *node;
/* Walk the virtual-function list to the TOKEN-th entry.  When the target
   uses descriptors each vtable slot occupies several entries, hence the
   scaled step below.  (The walk loop itself is elided from this listing.)  */
1367 v = BINFO_VIRTUALS (known_binfo);
1371 i += (TARGET_VTABLE_USES_DESCRIPTORS
1372 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
1376 fndecl = TREE_VALUE (v);
1377 node = cgraph_get_node (fndecl);
1378 /* When cgraph node is missing and function is not public, we cannot
1379 devirtualize. This can happen in WHOPR when the actual method
1380 ends up in other partition, because we found devirtualization
1381 possibility too late. */
1382 if ((!node || (!node->analyzed && !node->in_other_partition))
1383 && (!TREE_PUBLIC (fndecl) || DECL_COMDAT (fndecl)))
1385 return build_fold_addr_expr (fndecl);
1389 /* Fold a OBJ_TYPE_REF expression to the address of a function. If KNOWN_TYPE
1390 is not NULL_TREE, it is the true type of the outmost encapsulating object if
1391 that comes from a pointer SSA_NAME. If the true outmost encapsulating type
1392 can be determined from a declaration OBJ_TYPE_REF_OBJECT(REF), it is used
1393 regardless of KNOWN_TYPE (which thus can be NULL_TREE). */
1396 gimple_fold_obj_type_ref (tree ref, tree known_type)
1398 tree obj = OBJ_TYPE_REF_OBJECT (ref);
1399 tree known_binfo = known_type ? TYPE_BINFO (known_type) : NULL_TREE;
/* Strip a wrapping ADDR_EXPR so the binfo lookup sees the object itself.  */
1402 if (TREE_CODE (obj) == ADDR_EXPR)
1403 obj = TREE_OPERAND (obj, 0);
1405 binfo = gimple_get_relevant_ref_binfo (obj, known_binfo);
/* With a binfo in hand, translate the vtable token into a direct callee.  */
1408 HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
1409 return gimple_fold_obj_type_ref_known_binfo (token, binfo);
1415 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1416 The statement may be replaced by another statement, e.g., if the call
1417 simplifies to a constant value. Return true if any changes were made.
1418 It is assumed that the operands have been previously folded. */
1421 fold_gimple_call (gimple_stmt_iterator *gsi)
1423 gimple stmt = gsi_stmt (*gsi);
1425 tree callee = gimple_call_fndecl (stmt);
1427 /* Check for builtins that CCP can handle using information not
1428 available in the generic fold routines. */
1429 if (callee && DECL_BUILT_IN (callee))
1431 tree result = gimple_fold_builtin (stmt);
/* Prefer the cheap in-place replacement; fall back to regimplifying the
   folded tree when it is not a valid GIMPLE RHS.  */
1435 if (!update_call_from_tree (gsi, result))
1436 gimplify_and_update_call_from_tree (gsi, result);
1442 /* ??? Should perhaps do this in fold proper. However, doing it
1443 there requires that we create a new CALL_EXPR, and that requires
1444 copying EH region info to the new node. Easier to just do it
1445 here where we can just smash the call operand. */
1446 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
1447 callee = gimple_call_fn (stmt);
/* Try to devirtualize an OBJ_TYPE_REF call whose object is a known
   address; on success the call's function operand is rewritten.  */
1448 if (TREE_CODE (callee) == OBJ_TYPE_REF
1449 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR)
1453 t = gimple_fold_obj_type_ref (callee, NULL_TREE);
1456 gimple_call_set_fn (stmt, t);
1465 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1466 distinguishes both cases. */
1469 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1471 bool changed = false;
1472 gimple stmt = gsi_stmt (*gsi);
1475 /* Fold the main computation performed by the statement. */
/* NOTE(review): the case labels of this switch are elided in this listing;
   the arms below handle assignments, conds, calls, asms and debug binds —
   confirm against the full file.  */
1476 switch (gimple_code (stmt))
/* Assignment: fold the RHS, converting it back to the LHS type when the
   fold changed the type in a non-useless way.  */
1480 unsigned old_num_ops = gimple_num_ops (stmt);
1481 tree new_rhs = fold_gimple_assign (gsi);
1482 tree lhs = gimple_assign_lhs (stmt);
1484 && !useless_type_conversion_p (TREE_TYPE (lhs),
1485 TREE_TYPE (new_rhs)))
1486 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
/* In-place folding must not grow the statement's operand count.  */
1489 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1491 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1498 changed |= fold_gimple_cond (stmt);
1502 /* Fold *& in call arguments. */
1503 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1504 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1506 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1509 gimple_call_set_arg (stmt, i, tmp);
1513 /* The entire statement may be replaced in this case. */
1515 changed |= fold_gimple_call (gsi);
1519 /* Fold *& in asm operands. */
/* Outputs are lvalues (is_lhs = true), inputs are rvalues.  */
1520 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1522 tree link = gimple_asm_output_op (stmt, i);
1523 tree op = TREE_VALUE (link);
1524 if (REFERENCE_CLASS_P (op)
1525 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1527 TREE_VALUE (link) = op;
1531 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1533 tree link = gimple_asm_input_op (stmt, i);
1534 tree op = TREE_VALUE (link);
1535 if (REFERENCE_CLASS_P (op)
1536 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
1538 TREE_VALUE (link) = op;
/* Debug binds only carry a value; fold references within it.  */
1545 if (gimple_debug_bind_p (stmt))
1547 tree val = gimple_debug_bind_get_value (stmt);
1549 && REFERENCE_CLASS_P (val))
1551 tree tem = maybe_fold_reference (val, false);
1554 gimple_debug_bind_set_value (stmt, tem);
/* Re-fetch STMT: folding a call may have replaced the statement.  */
1564 stmt = gsi_stmt (*gsi);
1566 /* Fold *& on the lhs. */
1567 if (gimple_has_lhs (stmt))
1569 tree lhs = gimple_get_lhs (stmt);
1570 if (lhs && REFERENCE_CLASS_P (lhs))
1572 tree new_lhs = maybe_fold_reference (lhs, true);
1575 gimple_set_lhs (stmt, new_lhs);
1584 /* Fold the statement pointed to by GSI. In some cases, this function may
1585 replace the whole statement with a new one. Returns true iff folding
1587 The statement pointed to by GSI should be in valid gimple form but may
1588 be in unfolded state as resulting from for example constant propagation
1589 which can produce *&x = 0. */
1592 fold_stmt (gimple_stmt_iterator *gsi)
/* Non-inplace variant: the worker is allowed to replace the statement.  */
1594 return fold_stmt_1 (gsi, false);
1597 /* Perform the minimal folding on statement STMT. Only operations like
1598 *&x created by constant propagation are handled. The statement cannot
1599 be replaced with a new one. Return true if the statement was
1600 changed, false otherwise.
1601 The statement STMT should be in valid gimple form but may
1602 be in unfolded state as resulting from for example constant propagation
1603 which can produce *&x = 0. */
1606 fold_stmt_inplace (gimple stmt)
1608 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1609 bool changed = fold_stmt_1 (&gsi, true);
/* Inplace folding must never have replaced the statement.  */
1610 gcc_assert (gsi_stmt (gsi) == stmt);
1614 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1615 if EXPR is null or we don't know how.
1616 If non-null, the result always has boolean type. */
1619 canonicalize_bool (tree expr, bool invert)
/* First group: the INVERT path (the guarding `if (invert)` is elided in
   this listing) — note constants map to their logical opposites and the
   SSA_NAME / comparison forms are negated.  */
1625 if (integer_nonzerop (expr))
1626 return boolean_false_node;
1627 else if (integer_zerop (expr))
1628 return boolean_true_node;
1629 else if (TREE_CODE (expr) == SSA_NAME)
1630 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1631 build_int_cst (TREE_TYPE (expr), 0))
1632 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1633 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1635 TREE_OPERAND (expr, 0),
1636 TREE_OPERAND (expr, 1));
/* Non-inverted path: values already of boolean type pass through (as
   constants or as canonical NE/comparison forms).  */
1642 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1644 if (integer_nonzerop (expr))
1645 return boolean_true_node;
1646 else if (integer_zerop (expr))
1647 return boolean_false_node;
1648 else if (TREE_CODE (expr) == SSA_NAME)
1649 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1650 build_int_cst (TREE_TYPE (expr), 0));
1651 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1652 return fold_build2 (TREE_CODE (expr),
1654 TREE_OPERAND (expr, 0),
1655 TREE_OPERAND (expr, 1));
1661 /* Check to see if a boolean expression EXPR is logically equivalent to the
1662 comparison (OP1 CODE OP2). Check for various identities involving
1666 same_bool_comparison_p (const_tree expr, enum tree_code code,
1667 const_tree op1, const_tree op2)
1671 /* The obvious case. */
1672 if (TREE_CODE (expr) == code
1673 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1674 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1677 /* Check for comparing (name, name != 0) and the case where expr
1678 is an SSA_NAME with a definition matching the comparison. */
1679 if (TREE_CODE (expr) == SSA_NAME
1680 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
/* A boolean SSA name equals (name != 0) and the negation of (name == 0);
   otherwise look through its defining assignment.  */
1682 if (operand_equal_p (expr, op1, 0))
1683 return ((code == NE_EXPR && integer_zerop (op2))
1684 || (code == EQ_EXPR && integer_nonzerop (op2)));
1685 s = SSA_NAME_DEF_STMT (expr);
1686 if (is_gimple_assign (s)
1687 && gimple_assign_rhs_code (s) == code
1688 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1689 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1693 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1694 of name is a comparison, recurse. */
1695 if (TREE_CODE (op1) == SSA_NAME
1696 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1698 s = SSA_NAME_DEF_STMT (op1)
1699 if (is_gimple_assign (s)
1700 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1702 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) / (name == 1) keeps the comparison's sense; (name == 0) /
   (name != 1) inverts it before recursing.  */
1703 if ((c == NE_EXPR && integer_zerop (op2))
1704 || (c == EQ_EXPR && integer_nonzerop (op2)))
1705 return same_bool_comparison_p (expr, c,
1706 gimple_assign_rhs1 (s),
1707 gimple_assign_rhs2 (s));
1708 if ((c == EQ_EXPR && integer_zerop (op2))
1709 || (c == NE_EXPR && integer_nonzerop (op2)))
1710 return same_bool_comparison_p (expr,
1711 invert_tree_comparison (c, false),
1712 gimple_assign_rhs1 (s),
1713 gimple_assign_rhs2 (s));
1719 /* Check to see if two boolean expressions OP1 and OP2 are logically
1723 same_bool_result_p (const_tree op1, const_tree op2)
1725 /* Simple cases first. */
1726 if (operand_equal_p (op1, op2, 0))
1729 /* Check the cases where at least one of the operands is a comparison.
1730 These are a bit smarter than operand_equal_p in that they apply some
1731 identities on SSA_NAMEs. */
/* Try each orientation: op1 against the decomposed op2, then vice versa.  */
1732 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1733 && same_bool_comparison_p (op1, TREE_CODE (op2),
1734 TREE_OPERAND (op2, 0),
1735 TREE_OPERAND (op2, 1)))
1737 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1738 && same_bool_comparison_p (op2, TREE_CODE (op1),
1739 TREE_OPERAND (op1, 0),
1740 TREE_OPERAND (op1, 1)))
1747 /* Forward declarations for some mutually recursive functions. */
/* AND family: comparison x comparison, variable x comparison (with optional
   inversion), and the assignment-definition worker.  */
1750 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1751 enum tree_code code2, tree op2a, tree op2b);
1753 and_var_with_comparison (tree var, bool invert,
1754 enum tree_code code2, tree op2a, tree op2b);
1756 and_var_with_comparison_1 (gimple stmt,
1757 enum tree_code code2, tree op2a, tree op2b);
/* OR family: exact mirrors of the three AND entry points above.  */
1759 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1760 enum tree_code code2, tree op2a, tree op2b);
1762 or_var_with_comparison (tree var, bool invert,
1763 enum tree_code code2, tree op2a, tree op2b);
1765 or_var_with_comparison_1 (gimple stmt,
1766 enum tree_code code2, tree op2a, tree op2b);
1768 /* Helper function for and_comparisons_1: try to simplify the AND of the
1769 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1770 If INVERT is true, invert the value of the VAR before doing the AND.
1771 Return NULL_EXPR if we can't simplify this to a single expression. */
1774 and_var_with_comparison (tree var, bool invert,
1775 enum tree_code code2, tree op2a, tree op2b)
1778 gimple stmt = SSA_NAME_DEF_STMT (var);
1780 /* We can only deal with variables whose definitions are assignments. */
1781 if (!is_gimple_assign (stmt))
1784 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1785 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1786 Then we only have to consider the simpler non-inverted cases. */
1788 t = or_var_with_comparison_1 (stmt,
1789 invert_tree_comparison (code2, false),
1792 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool re-applies the inversion (if any) and guarantees a
   boolean-typed result or NULL_TREE.  */
1793 return canonicalize_bool (t, invert);
1796 /* Try to simplify the AND of the ssa variable defined by the assignment
1797 STMT with the comparison specified by (OP2A CODE2 OP2B).
1798 Return NULL_EXPR if we can't simplify this to a single expression. */
1801 and_var_with_comparison_1 (gimple stmt,
1802 enum tree_code code2, tree op2a, tree op2b)
1804 tree var = gimple_assign_lhs (stmt);
1805 tree true_test_var = NULL_TREE;
1806 tree false_test_var = NULL_TREE;
1807 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1809 /* Check for identities like (var AND (var == 0)) => false. */
/* Record whether (op2a code2 op2b) tests op2a for truth or falsehood; the
   recorded SSA name is reused by the inner1/inner2 identities below.  */
1810 if (TREE_CODE (op2a) == SSA_NAME
1811 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1813 if ((code2 == NE_EXPR && integer_zerop (op2b))
1814 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1816 true_test_var = op2a;
1817 if (var == true_test_var)
1820 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1821 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1823 false_test_var = op2a;
1824 if (var == false_test_var)
1825 return boolean_false_node;
1829 /* If the definition is a comparison, recurse on it. */
1830 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1832 tree t = and_comparisons_1 (innercode,
1833 gimple_assign_rhs1 (stmt),
1834 gimple_assign_rhs2 (stmt),
1842 /* If the definition is an AND or OR expression, we may be able to
1843 simplify by reassociating. */
1844 if (innercode == TRUTH_AND_EXPR
1845 || innercode == TRUTH_OR_EXPR
1846 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1847 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
1849 tree inner1 = gimple_assign_rhs1 (stmt);
1850 tree inner2 = gimple_assign_rhs2 (stmt);
1853 tree partial = NULL_TREE;
1854 bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);
1856 /* Check for boolean identities that don't require recursive examination
1858 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1859 inner1 AND (inner1 OR inner2) => inner1
1860 !inner1 AND (inner1 AND inner2) => false
1861 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1862 Likewise for similar cases involving inner2. */
1863 if (inner1 == true_test_var)
1864 return (is_and ? var : inner1);
1865 else if (inner2 == true_test_var)
1866 return (is_and ? var : inner2);
1867 else if (inner1 == false_test_var)
1869 ? boolean_false_node
1870 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1871 else if (inner2 == false_test_var)
1873 ? boolean_false_node
1874 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1876 /* Next, redistribute/reassociate the AND across the inner tests.
1877 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1878 if (TREE_CODE (inner1) == SSA_NAME
1879 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1880 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1881 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1882 gimple_assign_rhs1 (s),
1883 gimple_assign_rhs2 (s),
1884 code2, op2a, op2b)))
1886 /* Handle the AND case, where we are reassociating:
1887 (inner1 AND inner2) AND (op2a code2 op2b)
1889 If the partial result t is a constant, we win. Otherwise
1890 continue on to try reassociating with the other inner test. */
1893 if (integer_onep (t))
1895 else if (integer_zerop (t))
1896 return boolean_false_node;
1899 /* Handle the OR case, where we are redistributing:
1900 (inner1 OR inner2) AND (op2a code2 op2b)
1901 => (t OR (inner2 AND (op2a code2 op2b))) */
1904 if (integer_onep (t))
1905 return boolean_true_node;
1907 /* Save partial result for later. */
1912 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1913 if (TREE_CODE (inner2) == SSA_NAME
1914 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1915 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1916 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1917 gimple_assign_rhs1 (s),
1918 gimple_assign_rhs2 (s),
1919 code2, op2a, op2b)))
1921 /* Handle the AND case, where we are reassociating:
1922 (inner1 AND inner2) AND (op2a code2 op2b)
1923 => (inner1 AND t) */
1926 if (integer_onep (t))
1928 else if (integer_zerop (t))
1929 return boolean_false_node;
1932 /* Handle the OR case, where we are redistributing:
1933 (inner1 OR inner2) AND (op2a code2 op2b)
1934 => (t OR (inner1 AND (op2a code2 op2b)))
1935 => (t OR partial) */
1938 if (integer_onep (t))
1939 return boolean_true_node;
1942 /* We already got a simplification for the other
1943 operand to the redistributed OR expression. The
1944 interesting case is when at least one is false.
1945 Or, if both are the same, we can apply the identity
1947 if (integer_zerop (partial))
1949 else if (integer_zerop (t))
1951 else if (same_bool_result_p (t, partial))
1960 /* Try to simplify the AND of two comparisons defined by
1961 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1962 If this can be done without constructing an intermediate value,
1963 return the resulting tree; otherwise NULL_TREE is returned.
1964 This function is deliberately asymmetric as it recurses on SSA_DEFs
1965 in the first comparison but not the second. */
1968 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1969 enum tree_code code2, tree op2a, tree op2b)
1971 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1972 if (operand_equal_p (op1a, op2a, 0)
1973 && operand_equal_p (op1b, op2b, 0))
1975 tree t = combine_comparisons (UNKNOWN_LOCATION,
1976 TRUTH_ANDIF_EXPR, code1, code2,
1977 boolean_type_node, op1a, op1b);
1982 /* Likewise the swapped case of the above. */
1983 if (operand_equal_p (op1a, op2b, 0)
1984 && operand_equal_p (op1b, op2a, 0))
1986 tree t = combine_comparisons (UNKNOWN_LOCATION,
1987 TRUTH_ANDIF_EXPR, code1,
1988 swap_tree_comparison (code2),
1989 boolean_type_node, op1a, op1b);
1994 /* If both comparisons are of the same value against constants, we might
1995 be able to merge them. */
1996 if (operand_equal_p (op1a, op2a, 0)
1997 && TREE_CODE (op1b) == INTEGER_CST
1998 && TREE_CODE (op2b) == INTEGER_CST)
/* CMP orders the two constants: negative when op1b < op2b, etc.; all the
   case tables below are expressed in terms of this single comparison.  */
2000 int cmp = tree_int_cst_compare (op1b, op2b);
2002 /* If we have (op1a == op1b), we should either be able to
2003 return that or FALSE, depending on whether the constant op1b
2004 also satisfies the other comparison against op2b. */
2005 if (code1 == EQ_EXPR)
2011 case EQ_EXPR: val = (cmp == 0); break;
2012 case NE_EXPR: val = (cmp != 0); break;
2013 case LT_EXPR: val = (cmp < 0); break;
2014 case GT_EXPR: val = (cmp > 0); break;
2015 case LE_EXPR: val = (cmp <= 0); break;
2016 case GE_EXPR: val = (cmp >= 0); break;
2017 default: done = false;
2022 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2024 return boolean_false_node;
2027 /* Likewise if the second comparison is an == comparison. */
2028 else if (code2 == EQ_EXPR)
/* Note the LT/GT/LE/GE senses are mirrored here because CMP still orders
   op1b against op2b while the equality is now on the second test.  */
2034 case EQ_EXPR: val = (cmp == 0); break;
2035 case NE_EXPR: val = (cmp != 0); break;
2036 case LT_EXPR: val = (cmp > 0); break;
2037 case GT_EXPR: val = (cmp < 0); break;
2038 case LE_EXPR: val = (cmp >= 0); break;
2039 case GE_EXPR: val = (cmp <= 0); break;
2040 default: done = false;
2045 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2047 return boolean_false_node;
2051 /* Same business with inequality tests. */
2052 else if (code1 == NE_EXPR)
2057 case EQ_EXPR: val = (cmp != 0); break;
2058 case NE_EXPR: val = (cmp == 0); break;
2059 case LT_EXPR: val = (cmp >= 0); break;
2060 case GT_EXPR: val = (cmp <= 0); break;
2061 case LE_EXPR: val = (cmp > 0); break;
2062 case GE_EXPR: val = (cmp < 0); break;
2067 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2069 else if (code2 == NE_EXPR)
2074 case EQ_EXPR: val = (cmp == 0); break;
2075 case NE_EXPR: val = (cmp != 0); break;
2076 case LT_EXPR: val = (cmp <= 0); break;
2077 case GT_EXPR: val = (cmp >= 0); break;
2078 case LE_EXPR: val = (cmp < 0); break;
2079 case GE_EXPR: val = (cmp > 0); break;
2084 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2087 /* Choose the more restrictive of two < or <= comparisons. */
2088 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2089 && (code2 == LT_EXPR || code2 == LE_EXPR))
2091 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2092 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2094 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2097 /* Likewise choose the more restrictive of two > or >= comparisons. */
2098 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2099 && (code2 == GT_EXPR || code2 == GE_EXPR))
2101 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2102 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2104 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2107 /* Check for singleton ranges. */
/* (x <= c AND x >= c) collapses to x == c; the cmp == 0 guard is on an
   elided line.  */
2109 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2110 || (code1 == GE_EXPR && code2 == LE_EXPR)))
2111 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2113 /* Check for disjoint ranges. */
2115 && (code1 == LT_EXPR || code1 == LE_EXPR)
2116 && (code2 == GT_EXPR || code2 == GE_EXPR))
2117 return boolean_false_node;
2119 && (code1 == GT_EXPR || code1 == GE_EXPR)
2120 && (code2 == LT_EXPR || code2 == LE_EXPR))
2121 return boolean_false_node;
2124 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2125 NAME's definition is a truth value. See if there are any simplifications
2126 that can be done against the NAME's definition. */
2127 if (TREE_CODE (op1a) == SSA_NAME
2128 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2129 && (integer_zerop (op1b) || integer_onep (op1b)))
2131 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2132 || (code1 == NE_EXPR && integer_onep (op1b)));
2133 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2134 switch (gimple_code (stmt))
2137 /* Try to simplify by copy-propagating the definition. */
2138 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2141 /* If every argument to the PHI produces the same result when
2142 ANDed with the second comparison, we win.
2143 Do not do this unless the type is bool since we need a bool
2144 result here anyway. */
2145 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2147 tree result = NULL_TREE;
2149 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2151 tree arg = gimple_phi_arg_def (stmt, i);
2153 /* If this PHI has itself as an argument, ignore it.
2154 If all the other args produce the same result,
2156 if (arg == gimple_phi_result (stmt))
2158 else if (TREE_CODE (arg) == INTEGER_CST)
2160 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2163 result = boolean_false_node;
2164 else if (!integer_zerop (result))
2168 result = fold_build2 (code2, boolean_type_node,
2170 else if (!same_bool_comparison_p (result,
2174 else if (TREE_CODE (arg) == SSA_NAME)
2176 tree temp = and_var_with_comparison (arg, invert,
2182 else if (!same_bool_result_p (result, temp))
2198 /* Try to simplify the AND of two comparisons, specified by
2199 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2200 If this can be simplified to a single expression (without requiring
2201 introducing more SSA variables to hold intermediate values),
2202 return the resulting tree. Otherwise return NULL_TREE.
2203 If the result expression is non-null, it has boolean type. */
2206 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2207 enum tree_code code2, tree op2a, tree op2b)
/* and_comparisons_1 is asymmetric (it only recurses on the first operand's
   SSA defs), so try both orderings.  */
2209 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2213 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2216 /* Helper function for or_comparisons_1: try to simplify the OR of the
2217 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2218 If INVERT is true, invert the value of VAR before doing the OR.
2219 Return NULL_EXPR if we can't simplify this to a single expression. */
2222 or_var_with_comparison (tree var, bool invert,
2223 enum tree_code code2, tree op2a, tree op2b)
2226 gimple stmt = SSA_NAME_DEF_STMT (var);
2228 /* We can only deal with variables whose definitions are assignments. */
2229 if (!is_gimple_assign (stmt))
2232 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2233 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2234 Then we only have to consider the simpler non-inverted cases. */
2236 t = and_var_with_comparison_1 (stmt,
2237 invert_tree_comparison (code2, false),
2240 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool re-applies the inversion (if any) and guarantees a
   boolean-typed result or NULL_TREE.  */
2241 return canonicalize_bool (t, invert);
2244 /* Try to simplify the OR of the ssa variable defined by the assignment
2245 STMT with the comparison specified by (OP2A CODE2 OP2B).
2246 Return NULL_EXPR if we can't simplify this to a single expression. */
2249 or_var_with_comparison_1 (gimple stmt,
2250 enum tree_code code2, tree op2a, tree op2b)
2252 tree var = gimple_assign_lhs (stmt);
2253 tree true_test_var = NULL_TREE;
2254 tree false_test_var = NULL_TREE;
2255 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2257 /* Check for identities like (var OR (var != 0)) => true . */
/* Mirror of and_var_with_comparison_1: record whether the comparison tests
   op2a for truth or falsehood, for reuse by the identities below.  */
2258 if (TREE_CODE (op2a) == SSA_NAME
2259 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2261 if ((code2 == NE_EXPR && integer_zerop (op2b))
2262 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2264 true_test_var = op2a;
2265 if (var == true_test_var)
2268 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2269 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2271 false_test_var = op2a;
2272 if (var == false_test_var)
2273 return boolean_true_node;
2277 /* If the definition is a comparison, recurse on it. */
2278 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2280 tree t = or_comparisons_1 (innercode,
2281 gimple_assign_rhs1 (stmt),
2282 gimple_assign_rhs2 (stmt),
2290 /* If the definition is an AND or OR expression, we may be able to
2291 simplify by reassociating. */
2292 if (innercode == TRUTH_AND_EXPR
2293 || innercode == TRUTH_OR_EXPR
2294 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2295 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2297 tree inner1 = gimple_assign_rhs1 (stmt);
2298 tree inner2 = gimple_assign_rhs2 (stmt);
2301 tree partial = NULL_TREE;
2302 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2304 /* Check for boolean identities that don't require recursive examination
2306 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2307 inner1 OR (inner1 AND inner2) => inner1
2308 !inner1 OR (inner1 OR inner2) => true
2309 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2311 if (inner1 == true_test_var)
2312 return (is_or ? var : inner1);
2313 else if (inner2 == true_test_var)
2314 return (is_or ? var : inner2);
2315 else if (inner1 == false_test_var)
2318 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2319 else if (inner2 == false_test_var)
2322 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2324 /* Next, redistribute/reassociate the OR across the inner tests.
2325 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2326 if (TREE_CODE (inner1) == SSA_NAME
2327 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2328 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2329 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2330 gimple_assign_rhs1 (s),
2331 gimple_assign_rhs2 (s),
2332 code2, op2a, op2b)))
2334 /* Handle the OR case, where we are reassociating:
2335 (inner1 OR inner2) OR (op2a code2 op2b)
2337 If the partial result t is a constant, we win. Otherwise
2338 continue on to try reassociating with the other inner test. */
2339 if (innercode == TRUTH_OR_EXPR)
2341 if (integer_onep (t))
2342 return boolean_true_node;
2343 else if (integer_zerop (t))
2347 /* Handle the AND case, where we are redistributing:
2348 (inner1 AND inner2) OR (op2a code2 op2b)
2349 => (t AND (inner2 OR (op2a code op2b))) */
2352 if (integer_zerop (t))
2353 return boolean_false_node;
2355 /* Save partial result for later. */
2360 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2361 if (TREE_CODE (inner2) == SSA_NAME
2362 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2363 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2364 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2365 gimple_assign_rhs1 (s),
2366 gimple_assign_rhs2 (s),
2367 code2, op2a, op2b)))
2369 /* Handle the OR case, where we are reassociating:
2370 (inner1 OR inner2) OR (op2a code2 op2b)
2372 if (innercode == TRUTH_OR_EXPR)
2374 if (integer_zerop (t))
2376 else if (integer_onep (t))
2377 return boolean_true_node;
2380 /* Handle the AND case, where we are redistributing:
2381 (inner1 AND inner2) OR (op2a code2 op2b)
2382 => (t AND (inner1 OR (op2a code2 op2b)))
2383 => (t AND partial) */
2386 if (integer_zerop (t))
2387 return boolean_false_node;
2390 /* We already got a simplification for the other
2391 operand to the redistributed AND expression. The
2392 interesting case is when at least one is true.
2393 Or, if both are the same, we can apply the identity
2394 (x AND x) == true. */
/* NOTE(review): the comment above looks inconsistent — for identical
   operands (x AND x) == x; confirm against the returns on the elided
   lines before relying on it.  */
2395 if (integer_onep (partial))
2397 else if (integer_onep (t))
2399 else if (same_bool_result_p (t, partial))
2400 return boolean_true_node;
2408 /* Try to simplify the OR of two comparisons defined by
2409 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2410 If this can be done without constructing an intermediate value,
2411 return the resulting tree; otherwise NULL_TREE is returned.
2412 This function is deliberately asymmetric as it recurses on SSA_DEFs
2413 in the first comparison but not the second. */
2416 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2417 enum tree_code code2, tree op2a, tree op2b)
2419 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2420 if (operand_equal_p (op1a, op2a, 0)
2421 && operand_equal_p (op1b, op2b, 0))
2423 tree t = combine_comparisons (UNKNOWN_LOCATION,
2424 TRUTH_ORIF_EXPR, code1, code2,
2425 boolean_type_node, op1a, op1b);
2430 /* Likewise the swapped case of the above. */
2431 if (operand_equal_p (op1a, op2b, 0)
2432 && operand_equal_p (op1b, op2a, 0))
2434 tree t = combine_comparisons (UNKNOWN_LOCATION,
2435 TRUTH_ORIF_EXPR, code1,
2436 swap_tree_comparison (code2),
2437 boolean_type_node, op1a, op1b);
2442 /* If both comparisons are of the same value against constants, we might
2443 be able to merge them. */
2444 if (operand_equal_p (op1a, op2a, 0)
2445 && TREE_CODE (op1b) == INTEGER_CST
2446 && TREE_CODE (op2b) == INTEGER_CST)
2448 int cmp = tree_int_cst_compare (op1b, op2b);
2450 /* If we have (op1a != op1b), we should either be able to
2451 return that or TRUE, depending on whether the constant op1b
2452 also satisfies the other comparison against op2b. */
2453 if (code1 == NE_EXPR)
2459 case EQ_EXPR: val = (cmp == 0); break;
2460 case NE_EXPR: val = (cmp != 0); break;
2461 case LT_EXPR: val = (cmp < 0); break;
2462 case GT_EXPR: val = (cmp > 0); break;
2463 case LE_EXPR: val = (cmp <= 0); break;
2464 case GE_EXPR: val = (cmp >= 0); break;
2465 default: done = false;
2470 return boolean_true_node;
2472 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2475 /* Likewise if the second comparison is a != comparison. */
2476 else if (code2 == NE_EXPR)
2482 case EQ_EXPR: val = (cmp == 0); break;
2483 case NE_EXPR: val = (cmp != 0); break;
2484 case LT_EXPR: val = (cmp > 0); break;
2485 case GT_EXPR: val = (cmp < 0); break;
2486 case LE_EXPR: val = (cmp >= 0); break;
2487 case GE_EXPR: val = (cmp <= 0); break;
2488 default: done = false;
2493 return boolean_true_node;
2495 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2499 /* See if an equality test is redundant with the other comparison. */
2500 else if (code1 == EQ_EXPR)
2505 case EQ_EXPR: val = (cmp == 0); break;
2506 case NE_EXPR: val = (cmp != 0); break;
2507 case LT_EXPR: val = (cmp < 0); break;
2508 case GT_EXPR: val = (cmp > 0); break;
2509 case LE_EXPR: val = (cmp <= 0); break;
2510 case GE_EXPR: val = (cmp >= 0); break;
2515 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2517 else if (code2 == EQ_EXPR)
2522 case EQ_EXPR: val = (cmp == 0); break;
2523 case NE_EXPR: val = (cmp != 0); break;
2524 case LT_EXPR: val = (cmp > 0); break;
2525 case GT_EXPR: val = (cmp < 0); break;
2526 case LE_EXPR: val = (cmp >= 0); break;
2527 case GE_EXPR: val = (cmp <= 0); break;
2532 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2535 /* Chose the less restrictive of two < or <= comparisons. */
2536 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2537 && (code2 == LT_EXPR || code2 == LE_EXPR))
2539 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2540 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2542 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2545 /* Likewise chose the less restrictive of two > or >= comparisons. */
2546 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2547 && (code2 == GT_EXPR || code2 == GE_EXPR))
2549 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2550 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2552 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2555 /* Check for singleton ranges. */
2557 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2558 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2559 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2561 /* Check for less/greater pairs that don't restrict the range at all. */
2563 && (code1 == LT_EXPR || code1 == LE_EXPR)
2564 && (code2 == GT_EXPR || code2 == GE_EXPR))
2565 return boolean_true_node;
2567 && (code1 == GT_EXPR || code1 == GE_EXPR)
2568 && (code2 == LT_EXPR || code2 == LE_EXPR))
2569 return boolean_true_node;
2572 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2573 NAME's definition is a truth value. See if there are any simplifications
2574 that can be done against the NAME's definition. */
2575 if (TREE_CODE (op1a) == SSA_NAME
2576 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2577 && (integer_zerop (op1b) || integer_onep (op1b)))
2579 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2580 || (code1 == NE_EXPR && integer_onep (op1b)));
2581 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2582 switch (gimple_code (stmt))
2585 /* Try to simplify by copy-propagating the definition. */
2586 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2589 /* If every argument to the PHI produces the same result when
2590 ORed with the second comparison, we win.
2591 Do not do this unless the type is bool since we need a bool
2592 result here anyway. */
2593 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2595 tree result = NULL_TREE;
2597 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2599 tree arg = gimple_phi_arg_def (stmt, i);
2601 /* If this PHI has itself as an argument, ignore it.
2602 If all the other args produce the same result,
2604 if (arg == gimple_phi_result (stmt))
2606 else if (TREE_CODE (arg) == INTEGER_CST)
2608 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2611 result = boolean_true_node;
2612 else if (!integer_onep (result))
2616 result = fold_build2 (code2, boolean_type_node,
2618 else if (!same_bool_comparison_p (result,
2622 else if (TREE_CODE (arg) == SSA_NAME)
2624 tree temp = or_var_with_comparison (arg, invert,
2630 else if (!same_bool_result_p (result, temp))
2646 /* Try to simplify the OR of two comparisons, specified by
2647 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2648 If this can be simplified to a single expression (without requiring
2649 introducing more SSA variables to hold intermediate values),
2650 return the resulting tree. Otherwise return NULL_TREE.
2651 If the result expression is non-null, it has boolean type. */
2654 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2655 enum tree_code code2, tree op2a, tree op2b)
2657 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2661 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);