1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
34 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
35 acceptable form for is_gimple_min_invariant. */
/* NOTE(review): this listing is truncated (gaps in the embedded line
   numbering) -- the return-type line and closing braces/returns of this
   function are not visible here.  Verify against the full gimple-fold.c.  */
38 canonicalize_constructor_val (tree cval)
/* Fold &base p+ offset back into a plain address expression, if possible.  */
41 if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
43 tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
44 TREE_OPERAND (cval, 0),
45 TREE_OPERAND (cval, 1),
/* Taking the address of a VAR_DECL makes it referenced; record that so
   later passes see the variable.  */
50 if (TREE_CODE (cval) == ADDR_EXPR)
52 tree base = get_base_address (TREE_OPERAND (cval, 0));
53 if (base && TREE_CODE (base) == VAR_DECL)
54 add_referenced_var (base);
59 /* If SYM is a constant variable with known value, return the value.
60 NULL_TREE is returned otherwise. */
/* NOTE(review): truncated listing -- some lines (function header, early
   returns, closing brace) are missing from this view.  */
63 get_symbol_constant_value (tree sym)
/* Only static/external symbols can have a link-time-known value; for
   non-CONST_DECLs the varpool must additionally say the initializer is
   the final value.  */
65 if ((TREE_STATIC (sym) || DECL_EXTERNAL (sym))
66 && (TREE_CODE (sym) == CONST_DECL
67 || varpool_get_node (sym)->const_value_known))
69 tree val = DECL_INITIAL (sym);
72 val = canonicalize_constructor_val (val);
73 if (is_gimple_min_invariant (val))
76 /* Variables declared 'const' without an initializer
77 have zero as the initializer if they may not be
78 overridden at link or run time. */
/* Only scalar integral/float types get the implicit-zero treatment.  */
80 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
81 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
82 return fold_convert (TREE_TYPE (sym), integer_zero_node);
89 /* Return true if we may propagate the address expression ADDR into the
90 dereference DEREF and cancel them. */
/* NOTE(review): truncated listing -- the function header line and some
   returns are not visible here.  */
93 may_propagate_address_into_dereference (tree addr, tree deref)
95 gcc_assert (TREE_CODE (deref) == MEM_REF
96 && TREE_CODE (addr) == ADDR_EXPR);
98 /* Don't propagate if ADDR's operand has incomplete type. */
99 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
102 /* If the address is invariant then we do not need to preserve restrict
103 qualifications. But we do need to preserve volatile qualifiers until
104 we can annotate the folded dereference itself properly. */
105 if (is_gimple_min_invariant (addr)
106 && (!TREE_THIS_VOLATILE (deref)
107 || TYPE_VOLATILE (TREE_TYPE (addr))))
108 return useless_type_conversion_p (TREE_TYPE (deref),
109 TREE_TYPE (TREE_OPERAND (addr, 0)));
111 /* Else both the address substitution and the folding must result in
112 a valid useless type conversion sequence. */
113 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
115 && useless_type_conversion_p (TREE_TYPE (deref),
116 TREE_TYPE (TREE_OPERAND (addr, 0))));
120 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
121 BASE is an array type. OFFSET is a byte displacement.
123 LOC is the location of the original expression. */
/* NOTE(review): truncated listing -- several lines (function header,
   early NULL returns, braces) are missing from this fragment.  */
126 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
128 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
129 tree array_type, elt_type, elt_size;
132 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
133 measured in units of the size of elements type) from that ARRAY_REF).
134 We can't do anything if either is variable.
136 The case we handle here is *(&A[N]+O). */
137 if (TREE_CODE (base) == ARRAY_REF)
139 tree low_bound = array_ref_low_bound (base);
141 elt_offset = TREE_OPERAND (base, 1);
142 if (TREE_CODE (low_bound) != INTEGER_CST
143 || TREE_CODE (elt_offset) != INTEGER_CST)
/* Normalize the element offset relative to the array's low bound.  */
146 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
147 base = TREE_OPERAND (base, 0);
150 /* Ignore stupid user tricks of indexing non-array variables. */
151 array_type = TREE_TYPE (base);
152 if (TREE_CODE (array_type) != ARRAY_TYPE)
154 elt_type = TREE_TYPE (array_type);
156 /* Use signed size type for intermediate computation on the index. */
157 idx_type = ssizetype;
159 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
160 element type (so we can use the alignment if it's not constant).
161 Otherwise, compute the offset as an index by using a division. If the
162 division isn't exact, then don't do anything. */
163 elt_size = TYPE_SIZE_UNIT (elt_type);
166 if (integer_zerop (offset))
168 if (TREE_CODE (elt_size) != INTEGER_CST)
169 elt_size = size_int (TYPE_ALIGN (elt_type));
171 idx = build_int_cst (idx_type, 0);
175 unsigned HOST_WIDE_INT lquo, lrem;
176 HOST_WIDE_INT hquo, hrem;
179 /* The final array offset should be signed, so we need
180 to sign-extend the (possibly pointer) offset here
181 and use signed division. */
182 soffset = double_int_sext (tree_to_double_int (offset),
183 TYPE_PRECISION (TREE_TYPE (offset)));
/* Give up if the element size is non-constant or the division has a
   remainder (checked on the missing lines below -- listing truncated).  */
184 if (TREE_CODE (elt_size) != INTEGER_CST
185 || div_and_round_double (TRUNC_DIV_EXPR, 0,
186 soffset.low, soffset.high,
187 TREE_INT_CST_LOW (elt_size),
188 TREE_INT_CST_HIGH (elt_size),
189 &lquo, &hquo, &lrem, &hrem)
193 idx = build_int_cst_wide (idx_type, lquo, hquo);
196 /* Assume the low bound is zero. If there is a domain type, get the
197 low bound, if any, convert the index into that type, and add the
199 min_idx = build_int_cst (idx_type, 0);
200 domain_type = TYPE_DOMAIN (array_type);
203 idx_type = domain_type;
204 if (TYPE_MIN_VALUE (idx_type))
205 min_idx = TYPE_MIN_VALUE (idx_type);
207 min_idx = fold_convert (idx_type, min_idx);
209 if (TREE_CODE (min_idx) != INTEGER_CST)
212 elt_offset = fold_convert (idx_type, elt_offset);
215 if (!integer_zerop (min_idx))
216 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
217 if (!integer_zerop (elt_offset))
218 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
220 /* Make sure to possibly truncate late after offsetting. */
221 idx = fold_convert (idx_type, idx);
223 /* We don't want to construct access past array bounds. For example
226 should not be simplified into (*c)[14] or tree-vrp will
228 This is only an issue for multi-dimensional arrays. */
229 if (TREE_CODE (elt_type) == ARRAY_TYPE
/* Reject indices outside [TYPE_MIN_VALUE, TYPE_MAX_VALUE] of the domain,
   or negative indices when no domain bounds are known.  */
232 if (TYPE_MAX_VALUE (domain_type)
233 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
234 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
236 else if (TYPE_MIN_VALUE (domain_type)
237 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
238 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
240 else if (compare_tree_int (idx, 0) < 0)
245 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
246 SET_EXPR_LOCATION (t, loc);
252 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
253 LOC is the location of original expression.
255 Before attempting the conversion strip off existing ADDR_EXPRs. */
/* NOTE(review): truncated listing -- function header and several return
   statements are missing from this fragment.  */
258 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
264 if (TREE_CODE (base) != ADDR_EXPR)
267 base = TREE_OPERAND (base, 0);
/* &x + 0 with matching types folds directly to x.  */
268 if (types_compatible_p (orig_type, TREE_TYPE (base))
269 && integer_zerop (offset))
272 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
273 if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
278 /* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
279 LOC is the location of the original expression. */
/* NOTE(review): truncated listing -- function header and failure returns
   are not visible here.  */
282 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
288 if (TREE_CODE (addr) != ADDR_EXPR)
290 base = TREE_OPERAND (addr, 0);
291 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
/* Rebuild the address of the folded array reference and require the
   result type to be usable in place of ORIG_TYPE.  */
294 ret = build_fold_addr_expr (ret);
295 if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
297 SET_EXPR_LOCATION (ret, loc);
304 /* A quaint feature extant in our address arithmetic is that there
305 can be hidden type changes here. The type of the result need
306 not be the same as the type of the input pointer.
308 What we're after here is an expression of the form
309 (T *)(&array + const)
310 where array is OP0, const is OP1, RES_TYPE is T and
311 the cast doesn't actually exist, but is implicit in the
312 type of the POINTER_PLUS_EXPR. We'd like to turn this into
314 which may be able to propagate further. */
/* NOTE(review): truncated listing -- function header, several early
   returns and closing braces are missing from this fragment.  */
317 maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
322 /* The first operand should be an ADDR_EXPR. */
323 if (TREE_CODE (op0) != ADDR_EXPR)
325 op0 = TREE_OPERAND (op0, 0);
327 /* It had better be a constant. */
328 if (TREE_CODE (op1) != INTEGER_CST)
330 /* Or op0 should now be A[0] and the non-constant offset defined
331 via a multiplication by the array element size. */
332 if (TREE_CODE (op0) == ARRAY_REF
333 /* As we will end up creating a variable index array access
334 in the outermost array dimension make sure there isn't
335 a more inner array that the index could overflow to. */
336 && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
337 && integer_zerop (TREE_OPERAND (op0, 1))
338 && TREE_CODE (op1) == SSA_NAME)
340 gimple offset_def = SSA_NAME_DEF_STMT (op1);
341 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
342 if (!host_integerp (elsz, 1)
343 || !is_gimple_assign (offset_def))
346 /* Do not build array references of something that we can't
347 see the true number of array dimensions for. */
348 if (!DECL_P (TREE_OPERAND (op0, 0))
349 && !handled_component_p (TREE_OPERAND (op0, 0)))
/* &A[0] + i*elsz  ->  &A[i]; or, for byte-sized elements,
   &A[0] + i -> &A[i] when the offset is not a multiplication.  */
352 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
353 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
354 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
355 return build_fold_addr_expr
356 (build4 (ARRAY_REF, TREE_TYPE (op0),
357 TREE_OPERAND (op0, 0),
358 gimple_assign_rhs1 (offset_def),
359 TREE_OPERAND (op0, 2),
360 TREE_OPERAND (op0, 3)));
361 else if (integer_onep (elsz)
362 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
363 return build_fold_addr_expr
364 (build4 (ARRAY_REF, TREE_TYPE (op0),
365 TREE_OPERAND (op0, 0),
367 TREE_OPERAND (op0, 2),
368 TREE_OPERAND (op0, 3)));
/* Same pattern but with OP0 being the whole (one-dimensional) array
   rather than A[0].  */
370 else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
372 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
373 && TREE_CODE (op1) == SSA_NAME)
375 gimple offset_def = SSA_NAME_DEF_STMT (op1);
376 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
377 if (!host_integerp (elsz, 1)
378 || !is_gimple_assign (offset_def))
381 /* Do not build array references of something that we can't
382 see the true number of array dimensions for. */
384 && !handled_component_p (op0))
387 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
388 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
389 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
390 return build_fold_addr_expr
391 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
392 op0, gimple_assign_rhs1 (offset_def),
393 integer_zero_node, NULL_TREE));
394 else if (integer_onep (elsz)
395 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
396 return build_fold_addr_expr
397 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
399 integer_zero_node, NULL_TREE));
405 /* If the first operand is an ARRAY_REF, expand it so that we can fold
406 the offset into it. */
407 while (TREE_CODE (op0) == ARRAY_REF)
409 tree array_obj = TREE_OPERAND (op0, 0);
410 tree array_idx = TREE_OPERAND (op0, 1);
411 tree elt_type = TREE_TYPE (op0);
412 tree elt_size = TYPE_SIZE_UNIT (elt_type);
415 if (TREE_CODE (array_idx) != INTEGER_CST)
417 if (TREE_CODE (elt_size) != INTEGER_CST)
420 /* Un-bias the index by the min index of the array type. */
421 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
424 min_idx = TYPE_MIN_VALUE (min_idx);
427 if (TREE_CODE (min_idx) != INTEGER_CST)
430 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
431 if (!integer_zerop (min_idx))
432 array_idx = int_const_binop (MINUS_EXPR, array_idx,
437 /* Convert the index to a byte offset. */
438 array_idx = fold_convert (sizetype, array_idx);
439 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
441 /* Update the operands for the next round, or for folding. */
442 op1 = int_const_binop (PLUS_EXPR,
447 ptd_type = TREE_TYPE (res_type);
448 /* If we want a pointer to void, reconstruct the reference from the
449 array element type. A pointer to that can be trivially converted
450 to void *. This happens as we fold (void *)(ptr p+ off). */
451 if (VOID_TYPE_P (ptd_type)
452 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
453 ptd_type = TREE_TYPE (TREE_TYPE (op0));
455 /* At which point we can try some of the same things as for indirects. */
456 t = maybe_fold_offset_to_array_ref (loc, op0, op1);
459 t = build_fold_addr_expr (t);
460 if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
462 SET_EXPR_LOCATION (t, loc);
468 /* Subroutine of fold_stmt. We perform several simplifications of the
469 memory reference tree EXPR and make sure to re-gimplify them properly
470 after propagation of constant addresses. IS_LHS is true if the
471 reference is supposed to be an lvalue. */
/* NOTE(review): truncated listing -- the declaration of T (presumably
   a tree * pointing into EXPR), the function header and several returns
   are missing from this fragment.  */
474 maybe_fold_reference (tree expr, bool is_lhs)
/* For rvalues, try to fold the whole reference to a constant first.  */
480 && (result = fold_const_aggregate_ref (expr))
481 && is_gimple_min_invariant (result))
484 /* ??? We might want to open-code the relevant remaining cases
485 to avoid using the generic fold. */
486 if (handled_component_p (*t)
487 && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
489 tree tem = fold (*t);
/* Walk down to the base object of the reference.  */
494 while (handled_component_p (*t))
495 t = &TREE_OPERAND (*t, 0);
497 /* Fold back MEM_REFs to reference trees. */
498 if (TREE_CODE (*t) == MEM_REF
499 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
500 && integer_zerop (TREE_OPERAND (*t, 1))
501 && (TREE_THIS_VOLATILE (*t)
502 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
503 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
504 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
505 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
506 /* We have to look out here to not drop a required conversion
507 from the rhs to the lhs if is_lhs, but we don't have the
508 rhs here to verify that. Thus require strict type
510 && types_compatible_p (TREE_TYPE (*t),
511 TREE_TYPE (TREE_OPERAND
512 (TREE_OPERAND (*t, 0), 0))))
/* Replace the MEM_REF base in place, then retry folding the whole
   expression recursively.  */
515 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
516 tem = maybe_fold_reference (expr, is_lhs);
521 /* Canonicalize MEM_REFs invariant address operand. */
522 else if (TREE_CODE (*t) == MEM_REF
523 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
524 && !DECL_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))
525 && !CONSTANT_CLASS_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
527 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
528 TREE_OPERAND (*t, 0),
529 TREE_OPERAND (*t, 1));
533 tem = maybe_fold_reference (expr, is_lhs);
539 else if (TREE_CODE (*t) == TARGET_MEM_REF)
541 tree tem = maybe_fold_tmr (*t);
545 tem = maybe_fold_reference (expr, is_lhs);
/* For rvalue decls, substitute a known constant value if the type
   conversion is useless.  */
554 tree tem = get_symbol_constant_value (*t);
556 && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
558 *t = unshare_expr (tem);
559 tem = maybe_fold_reference (expr, is_lhs);
570 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
571 replacement rhs for the statement or NULL_TREE if no simplification
572 could be made. It is assumed that the operands have been previously
/* NOTE(review): truncated listing -- the function return type, many
   braces, `break`s and `return result` lines are missing from this view.  */
576 fold_gimple_assign (gimple_stmt_iterator *si)
578 gimple stmt = gsi_stmt (*si);
579 enum tree_code subcode = gimple_assign_rhs_code (stmt);
580 location_t loc = gimple_location (stmt);
582 tree result = NULL_TREE;
/* Dispatch on the shape of the RHS: single operand, unary, binary,
   ternary.  */
584 switch (get_gimple_rhs_class (subcode))
586 case GIMPLE_SINGLE_RHS:
588 tree rhs = gimple_assign_rhs1 (stmt);
590 /* Try to fold a conditional expression. */
591 if (TREE_CODE (rhs) == COND_EXPR)
593 tree op0 = COND_EXPR_COND (rhs);
596 location_t cond_loc = EXPR_LOCATION (rhs);
598 if (COMPARISON_CLASS_P (op0))
/* Defer overflow warnings so we only emit them if the folded
   condition is actually used.  */
600 fold_defer_overflow_warnings ();
601 tem = fold_binary_loc (cond_loc,
602 TREE_CODE (op0), TREE_TYPE (op0),
603 TREE_OPERAND (op0, 0),
604 TREE_OPERAND (op0, 1));
605 /* This is actually a conditional expression, not a GIMPLE
606 conditional statement, however, the valid_gimple_rhs_p
607 test still applies. */
608 set = (tem && is_gimple_condexpr (tem)
609 && valid_gimple_rhs_p (tem));
610 fold_undefer_overflow_warnings (set, stmt, 0);
612 else if (is_gimple_min_invariant (op0))
621 result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
622 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
625 else if (REFERENCE_CLASS_P (rhs))
626 return maybe_fold_reference (rhs, false);
628 else if (TREE_CODE (rhs) == ADDR_EXPR)
630 tree ref = TREE_OPERAND (rhs, 0);
631 tree tem = maybe_fold_reference (ref, true);
/* &*p with zero offset folds back to p (modulo a conversion).  */
633 && TREE_CODE (tem) == MEM_REF
634 && integer_zerop (TREE_OPERAND (tem, 1)))
635 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
637 result = fold_convert (TREE_TYPE (rhs),
638 build_fold_addr_expr_loc (loc, tem));
639 else if (TREE_CODE (ref) == MEM_REF
640 && integer_zerop (TREE_OPERAND (ref, 1)))
641 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
644 else if (TREE_CODE (rhs) == CONSTRUCTOR
645 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
646 && (CONSTRUCTOR_NELTS (rhs)
647 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
649 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
/* Only fold when every element is a constant of a foldable kind.  */
653 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
654 if (TREE_CODE (val) != INTEGER_CST
655 && TREE_CODE (val) != REAL_CST
656 && TREE_CODE (val) != FIXED_CST)
659 return build_vector_from_ctor (TREE_TYPE (rhs),
660 CONSTRUCTOR_ELTS (rhs));
663 else if (DECL_P (rhs))
664 return unshare_expr (get_symbol_constant_value (rhs));
666 /* If we couldn't fold the RHS, hand over to the generic
668 if (result == NULL_TREE)
671 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
672 that may have been added by fold, and "useless" type
673 conversions that might now be apparent due to propagation. */
674 STRIP_USELESS_TYPE_CONVERSION (result);
676 if (result != rhs && valid_gimple_rhs_p (result))
683 case GIMPLE_UNARY_RHS:
685 tree rhs = gimple_assign_rhs1 (stmt);
687 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
690 /* If the operation was a conversion do _not_ mark a
691 resulting constant with TREE_OVERFLOW if the original
692 constant was not. These conversions have implementation
693 defined behavior and retaining the TREE_OVERFLOW flag
694 here would confuse later passes such as VRP. */
695 if (CONVERT_EXPR_CODE_P (subcode)
696 && TREE_CODE (result) == INTEGER_CST
697 && TREE_CODE (rhs) == INTEGER_CST)
698 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
700 STRIP_USELESS_TYPE_CONVERSION (result);
701 if (valid_gimple_rhs_p (result))
/* Pointer-to-pointer conversions: try folding as an address with
   zero offset.  */
704 else if (CONVERT_EXPR_CODE_P (subcode)
705 && POINTER_TYPE_P (gimple_expr_type (stmt))
706 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
708 tree type = gimple_expr_type (stmt);
709 tree t = maybe_fold_offset_to_address (loc,
710 gimple_assign_rhs1 (stmt),
711 integer_zero_node, type);
718 case GIMPLE_BINARY_RHS:
719 /* Try to fold pointer addition. */
720 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
722 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
723 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
725 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
726 if (!useless_type_conversion_p
727 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
728 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
730 result = maybe_fold_stmt_addition (gimple_location (stmt),
732 gimple_assign_rhs1 (stmt),
733 gimple_assign_rhs2 (stmt));
737 result = fold_binary_loc (loc, subcode,
738 TREE_TYPE (gimple_assign_lhs (stmt)),
739 gimple_assign_rhs1 (stmt),
740 gimple_assign_rhs2 (stmt));
744 STRIP_USELESS_TYPE_CONVERSION (result);
745 if (valid_gimple_rhs_p (result))
748 /* Fold might have produced non-GIMPLE, so if we trust it blindly
749 we lose canonicalization opportunities. Do not go again
750 through fold here though, or the same non-GIMPLE will be
752 if (commutative_tree_code (subcode)
753 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
754 gimple_assign_rhs2 (stmt), false))
755 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
756 gimple_assign_rhs2 (stmt),
757 gimple_assign_rhs1 (stmt));
761 case GIMPLE_TERNARY_RHS:
762 result = fold_ternary_loc (loc, subcode,
763 TREE_TYPE (gimple_assign_lhs (stmt)),
764 gimple_assign_rhs1 (stmt),
765 gimple_assign_rhs2 (stmt),
766 gimple_assign_rhs3 (stmt));
770 STRIP_USELESS_TYPE_CONVERSION (result);
771 if (valid_gimple_rhs_p (result))
774 /* Fold might have produced non-GIMPLE, so if we trust it blindly
775 we lose canonicalization opportunities. Do not go again
776 through fold here though, or the same non-GIMPLE will be
778 if (commutative_ternary_tree_code (subcode)
779 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
780 gimple_assign_rhs2 (stmt), false))
781 return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
782 gimple_assign_rhs2 (stmt),
783 gimple_assign_rhs1 (stmt),
784 gimple_assign_rhs3 (stmt));
788 case GIMPLE_INVALID_RHS:
795 /* Attempt to fold a conditional statement. Return true if any changes were
796 made. We only attempt to fold the condition expression, and do not perform
797 any transformation that would require alteration of the cfg. It is
798 assumed that the operands have been previously folded. */
/* NOTE(review): truncated listing -- return type, NULL check on RESULT
   and the final returns are not visible here.  */
801 fold_gimple_cond (gimple stmt)
803 tree result = fold_binary_loc (gimple_location (stmt),
804 gimple_cond_code (stmt),
806 gimple_cond_lhs (stmt),
807 gimple_cond_rhs (stmt));
811 STRIP_USELESS_TYPE_CONVERSION (result);
/* Only install the folded form if it is still a valid GIMPLE condition.  */
812 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
814 gimple_cond_set_condition_from_tree (stmt, result);
822 /* Convert EXPR into a GIMPLE value suitable for substitution on the
823 RHS of an assignment. Insert the necessary statements before
824 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
825 is replaced. If the call is expected to produces a result, then it
826 is replaced by an assignment of the new RHS to the result variable.
827 If the result is to be ignored, then the call is replaced by a
828 GIMPLE_NOP. A proper VDEF chain is retained by making the first
829 VUSE and the last VDEF of the whole sequence be the same as the replaced
830 statement and using new SSA names for stores in between. */
/* NOTE(review): truncated listing -- declarations (lhs, reaching_vuse,
   last), loop braces and the GIMPLE_NOP replacement path are missing
   from this fragment.  Virtual-operand bookkeeping here is order
   sensitive; do not reorder.  */
833 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
836 tree tmp = NULL_TREE; /* Silence warning. */
837 gimple stmt, new_stmt;
838 gimple_stmt_iterator i;
839 gimple_seq stmts = gimple_seq_alloc();
840 struct gimplify_ctx gctx;
842 gimple laststore = NULL;
845 stmt = gsi_stmt (*si_p);
847 gcc_assert (is_gimple_call (stmt));
849 lhs = gimple_call_lhs (stmt);
850 reaching_vuse = gimple_vuse (stmt);
852 push_gimplify_context (&gctx);
/* Gimplify EXPR; if the call's result is used, capture it in TMP.  */
854 if (lhs == NULL_TREE)
855 gimplify_and_add (expr, &stmts);
857 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
859 pop_gimplify_context (NULL);
861 if (gimple_has_location (stmt))
862 annotate_all_with_location (stmts, gimple_location (stmt));
864 /* The replacement can expose previously unreferenced variables. */
865 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
869 gsi_insert_before (si_p, last, GSI_NEW_STMT);
872 new_stmt = gsi_stmt (i);
873 if (gimple_in_ssa_p (cfun))
875 find_new_referenced_vars (new_stmt);
876 mark_symbols_for_renaming (new_stmt);
878 /* If the new statement has a VUSE, update it with exact SSA name we
879 know will reach this one. */
880 if (gimple_vuse (new_stmt))
882 /* If we've also seen a previous store create a new VDEF for
883 the latter one, and make that the new reaching VUSE. */
886 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
887 gimple_set_vdef (laststore, reaching_vuse);
888 update_stmt (laststore);
891 gimple_set_vuse (new_stmt, reaching_vuse);
892 gimple_set_modified (new_stmt, true);
/* Remember the last store in the sequence so its VDEF can be wired up.  */
894 if (gimple_assign_single_p (new_stmt)
895 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
897 laststore = new_stmt;
902 if (lhs == NULL_TREE)
904 /* If we replace a call without LHS that has a VDEF and our new
905 sequence ends with a store we must make that store have the same
906 vdef in order not to break the sequencing. This can happen
907 for instance when folding memcpy calls into assignments. */
908 if (gimple_vdef (stmt) && laststore)
910 gimple_set_vdef (laststore, gimple_vdef (stmt));
911 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
912 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
913 update_stmt (laststore);
915 else if (gimple_in_ssa_p (cfun))
917 unlink_stmt_vdef (stmt);
926 gsi_insert_before (si_p, last, GSI_NEW_STMT);
929 if (laststore && is_gimple_reg (lhs))
931 gimple_set_vdef (laststore, gimple_vdef (stmt));
932 update_stmt (laststore);
933 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
934 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
939 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
940 gimple_set_vdef (laststore, reaching_vuse);
941 update_stmt (laststore);
/* Finally replace the original call with LHS = TMP.  */
944 new_stmt = gimple_build_assign (lhs, tmp);
945 if (!is_gimple_reg (tmp))
946 gimple_set_vuse (new_stmt, reaching_vuse);
947 if (!is_gimple_reg (lhs))
949 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
950 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
951 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
955 gimple_set_location (new_stmt, gimple_location (stmt));
956 gsi_replace (si_p, new_stmt, false);
959 /* Return the string length, maximum string length or maximum value of
961 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
962 is not NULL and, for TYPE == 0, its value is not equal to the length
963 we determine or if we are unable to determine the length or value,
964 return false. VISITED is a bitmap of visited variables.
965 TYPE is 0 if string length should be returned, 1 for maximum string
966 length and 2 for maximum value ARG can have. */
/* NOTE(review): truncated listing -- function header, several returns,
   the GIMPLE_PHI case label and closing braces are not visible here.  */
969 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
/* Non-SSA arguments: handle COND_EXPR, &array[0] forms and plain
   strings directly.  */
974 if (TREE_CODE (arg) != SSA_NAME)
976 if (TREE_CODE (arg) == COND_EXPR)
977 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
978 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
979 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
980 else if (TREE_CODE (arg) == ADDR_EXPR
981 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
982 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
984 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
985 if (TREE_CODE (aop0) == INDIRECT_REF
986 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
987 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
988 length, visited, type);
994 if (TREE_CODE (val) != INTEGER_CST
995 || tree_int_cst_sgn (val) < 0)
999 val = c_strlen (arg, 1);
/* Merge VAL into *LENGTH: for max-length queries keep the larger value,
   for exact-length queries require equality.  */
1007 if (TREE_CODE (*length) != INTEGER_CST
1008 || TREE_CODE (val) != INTEGER_CST)
1011 if (tree_int_cst_lt (*length, val))
1015 else if (simple_cst_equal (val, *length) != 1)
1023 /* If we were already here, break the infinite cycle. */
1024 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1028 def_stmt = SSA_NAME_DEF_STMT (var);
1030 switch (gimple_code (def_stmt))
1033 /* The RHS of the statement defining VAR must either have a
1034 constant length or come from another SSA_NAME with a constant
1036 if (gimple_assign_single_p (def_stmt)
1037 || gimple_assign_unary_nop_p (def_stmt))
1039 tree rhs = gimple_assign_rhs1 (def_stmt);
1040 return get_maxval_strlen (rhs, length, visited, type);
1046 /* All the arguments of the PHI node must have the same constant
1050 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1052 tree arg = gimple_phi_arg (def_stmt, i)->def;
1054 /* If this PHI has itself as an argument, we cannot
1055 determine the string length of this argument. However,
1056 if we can find a constant string length for the other
1057 PHI args then we can still be sure that this is a
1058 constant string length. So be optimistic and just
1059 continue with the next argument. */
1060 if (arg == gimple_phi_result (def_stmt))
1063 if (!get_maxval_strlen (arg, length, visited, type))
1075 /* Fold builtin call in statement STMT. Returns a simplified tree.
1076 We may return a non-constant expression, including another call
1077 to a different function and with different arguments, e.g.,
1078 substituting memcpy for strcpy when the string length is known.
1079 Note that some builtins expand into inline code that may not
1080 be valid in GIMPLE. Callers must take care. */
/* NOTE(review): truncated listing -- function header, declarations
   (callee, nargs, arg_idx, type, a, visited), arg_idx/type assignments
   per builtin, several returns and `break`s are missing from this view.  */
1083 gimple_fold_builtin (gimple stmt)
1085 tree result, val[3];
1091 location_t loc = gimple_location (stmt);
1093 gcc_assert (is_gimple_call (stmt));
1095 ignore = (gimple_call_lhs (stmt) == NULL);
1097 /* First try the generic builtin folder. If that succeeds, return the
1099 result = fold_call_stmt (stmt, ignore);
1103 STRIP_NOPS (result);
1107 /* Ignore MD builtins. */
1108 callee = gimple_call_fndecl (stmt);
1109 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1112 /* If the builtin could not be folded, and it has no argument list,
1114 nargs = gimple_call_num_args (stmt);
1118 /* Limit the work only for builtins we know how to simplify. */
/* This first switch selects, per builtin, which argument the string
   length/value should be computed for (assignments are on the missing
   lines of this truncated listing).  */
1119 switch (DECL_FUNCTION_CODE (callee))
1121 case BUILT_IN_STRLEN:
1122 case BUILT_IN_FPUTS:
1123 case BUILT_IN_FPUTS_UNLOCKED:
1127 case BUILT_IN_STRCPY:
1128 case BUILT_IN_STRNCPY:
1132 case BUILT_IN_MEMCPY_CHK:
1133 case BUILT_IN_MEMPCPY_CHK:
1134 case BUILT_IN_MEMMOVE_CHK:
1135 case BUILT_IN_MEMSET_CHK:
1136 case BUILT_IN_STRNCPY_CHK:
1140 case BUILT_IN_STRCPY_CHK:
1141 case BUILT_IN_STPCPY_CHK:
1145 case BUILT_IN_SNPRINTF_CHK:
1146 case BUILT_IN_VSNPRINTF_CHK:
1154 if (arg_idx >= nargs)
1157 /* Try to use the dataflow information gathered by the CCP process. */
1158 visited = BITMAP_ALLOC (NULL);
1159 bitmap_clear (visited);
1161 memset (val, 0, sizeof (val));
1162 a = gimple_call_arg (stmt, arg_idx);
1163 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
1164 val[arg_idx] = NULL_TREE;
1166 BITMAP_FREE (visited);
/* Second switch: perform the actual per-builtin folding using the
   length/value information computed above.  */
1169 switch (DECL_FUNCTION_CODE (callee))
1171 case BUILT_IN_STRLEN:
1172 if (val[0] && nargs == 1)
1175 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
1177 /* If the result is not a valid gimple value, or not a cast
1178 of a valid gimple value, then we cannot use the result. */
1179 if (is_gimple_val (new_val)
1180 || (is_gimple_cast (new_val)
1181 && is_gimple_val (TREE_OPERAND (new_val, 0))))
1186 case BUILT_IN_STRCPY:
1187 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
1188 result = fold_builtin_strcpy (loc, callee,
1189 gimple_call_arg (stmt, 0),
1190 gimple_call_arg (stmt, 1),
1194 case BUILT_IN_STRNCPY:
1195 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1196 result = fold_builtin_strncpy (loc, callee,
1197 gimple_call_arg (stmt, 0),
1198 gimple_call_arg (stmt, 1),
1199 gimple_call_arg (stmt, 2),
1203 case BUILT_IN_FPUTS:
1205 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1206 gimple_call_arg (stmt, 1),
1207 ignore, false, val[0]);
1210 case BUILT_IN_FPUTS_UNLOCKED:
1212 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1213 gimple_call_arg (stmt, 1),
1214 ignore, true, val[0]);
1217 case BUILT_IN_MEMCPY_CHK:
1218 case BUILT_IN_MEMPCPY_CHK:
1219 case BUILT_IN_MEMMOVE_CHK:
1220 case BUILT_IN_MEMSET_CHK:
1221 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1222 result = fold_builtin_memory_chk (loc, callee,
1223 gimple_call_arg (stmt, 0),
1224 gimple_call_arg (stmt, 1),
1225 gimple_call_arg (stmt, 2),
1226 gimple_call_arg (stmt, 3),
1228 DECL_FUNCTION_CODE (callee));
1231 case BUILT_IN_STRCPY_CHK:
1232 case BUILT_IN_STPCPY_CHK:
1233 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1234 result = fold_builtin_stxcpy_chk (loc, callee,
1235 gimple_call_arg (stmt, 0),
1236 gimple_call_arg (stmt, 1),
1237 gimple_call_arg (stmt, 2),
1239 DECL_FUNCTION_CODE (callee));
1242 case BUILT_IN_STRNCPY_CHK:
1243 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1244 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
1245 gimple_call_arg (stmt, 1),
1246 gimple_call_arg (stmt, 2),
1247 gimple_call_arg (stmt, 3),
1251 case BUILT_IN_SNPRINTF_CHK:
1252 case BUILT_IN_VSNPRINTF_CHK:
1253 if (val[1] && is_gimple_val (val[1]))
1254 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1255 DECL_FUNCTION_CODE (callee));
/* If the call's result is unused, discard everything but side effects.  */
1262 if (result && ignore)
1263 result = fold_ignored_result (result);
1267 /* Return the first of the base binfos of BINFO that has virtual functions. */
1270 get_first_base_binfo_with_virtuals (tree binfo)
/* Scan BINFO's direct bases in declaration order and stop at the first
   one that declares virtual functions (BINFO_VIRTUALS non-null).
   NOTE(review): the return statements are elided in this excerpt --
   presumably base_binfo on a hit and NULL_TREE otherwise; confirm
   against the complete source.  */
1275 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1276 if (BINFO_VIRTUALS (base_binfo))
1283 /* Search for a base binfo of BINFO that corresponds to TYPE and return it if
1284 it is found or NULL_TREE if it is not. */
1287 get_base_binfo_for_type (tree binfo, tree type)
/* Linear search through BINFO's direct bases for the one whose
   TREE_TYPE is exactly TYPE (pointer identity, so types must be the
   canonical variants).  RES starts as NULL_TREE and is presumably
   returned at the end -- the assignment and return are elided here.  */
1291 tree res = NULL_TREE;
1293 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1294 if (TREE_TYPE (base_binfo) == type)
1303 /* Return a binfo describing the part of object referenced by expression REF.
1304 Return NULL_TREE if it cannot be determined. REF can consist of a series of
1305 COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just
1306 a simple declaration, indirect reference or an SSA_NAME. If the function
1307 discovers an INDIRECT_REF or an SSA_NAME, it will assume that the
1308 encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
1309 Otherwise the first non-artificial field declaration or the base declaration
1310 will be examined to get the encapsulating type. */
1313 gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
/* Peel COMPONENT_REFs off REF (apparently inside a loop whose header is
   elided) until a declaration, SSA_NAME or MEM_REF is reached.  */
1317 if (TREE_CODE (ref) == COMPONENT_REF)
1320 tree binfo, base_binfo;
1321 tree field = TREE_OPERAND (ref, 1);
/* A non-artificial field is a user-declared member; if it has record
   type, its own binfo describes the referenced sub-object.  */
1323 if (!DECL_ARTIFICIAL (field))
1325 tree type = TREE_TYPE (field);
1326 if (TREE_CODE (type) == RECORD_TYPE)
1327 return TYPE_BINFO (type);
/* Artificial field: this is a base-class sub-object.  Look at the
   enclosing record's binfo to find which base it corresponds to.  */
1332 par_type = TREE_TYPE (TREE_OPERAND (ref, 0));
1333 binfo = TYPE_BINFO (par_type);
1335 || BINFO_N_BASE_BINFOS (binfo) == 0)
1338 base_binfo = get_first_base_binfo_with_virtuals (binfo);
/* The field's type does not match the first base with virtuals, so we
   must recurse on the containing object and then descend into the
   base binfo matching the field's type.  */
1339 if (base_binfo && BINFO_TYPE (base_binfo) != TREE_TYPE (field))
1343 d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0),
1345 /* Get descendant binfo. */
1348 return get_base_binfo_for_type (d_binfo, TREE_TYPE (field));
/* Otherwise strip this COMPONENT_REF and keep peeling.  */
1351 ref = TREE_OPERAND (ref, 0);
/* A plain declaration of record type describes itself.  */
1353 else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE)
1354 return TYPE_BINFO (TREE_TYPE (ref));
/* For SSA_NAMEs and indirect refs we have no static type info; fall
   back to the caller-supplied KNOWN_BINFO (return elided here).  */
1355 else if (known_binfo
1356 && (TREE_CODE (ref) == SSA_NAME
1357 || TREE_CODE (ref) == MEM_REF))
1364 /* Fold a OBJ_TYPE_REF expression to the address of a function. TOKEN is
1365 integer form of OBJ_TYPE_REF_TOKEN of the reference expression. KNOWN_BINFO
1366 carries the binfo describing the true type of OBJ_TYPE_REF_OBJECT(REF). */
1369 gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo)
1373 struct cgraph_node *node;
/* Walk the virtual-function list of KNOWN_BINFO to the entry selected
   by TOKEN.  The loop over V advancing by TREE_CHAIN is elided here;
   on targets using vtable descriptors each virtual slot occupies
   TARGET_VTABLE_USES_DESCRIPTORS consecutive entries, hence the
   adjusted stride below.  */
1375 v = BINFO_VIRTUALS (known_binfo);
1379 i += (TARGET_VTABLE_USES_DESCRIPTORS
1380 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
1384 fndecl = TREE_VALUE (v);
1385 node = cgraph_get_node (fndecl);
1386 /* When cgraph node is missing and function is not public, we cannot
1387 devirtualize. This can happen in WHOPR when the actual method
1388 ends up in other partition, because we found devirtualization
1389 possibility too late. */
1390 if ((!node || (!node->analyzed && !node->in_other_partition))
1391 && (!TREE_PUBLIC (fndecl) || DECL_COMDAT (fndecl)))
/* Success: fold the OBJ_TYPE_REF to a direct address of FNDECL.  */
1393 return build_fold_addr_expr (fndecl);
1397 /* Fold a OBJ_TYPE_REF expression to the address of a function. If KNOWN_TYPE
1398 is not NULL_TREE, it is the true type of the outmost encapsulating object if
1399 that comes from a pointer SSA_NAME. If the true outmost encapsulating type
1400 can be determined from a declaration OBJ_TYPE_REF_OBJECT(REF), it is used
1401 regardless of KNOWN_TYPE (which thus can be NULL_TREE). */
1404 gimple_fold_obj_type_ref (tree ref, tree known_type)
1406 tree obj = OBJ_TYPE_REF_OBJECT (ref);
1407 tree known_binfo = known_type ? TYPE_BINFO (known_type) : NULL_TREE;
/* Strip a leading &: the binfo lookup wants the object itself.  */
1410 if (TREE_CODE (obj) == ADDR_EXPR)
1411 obj = TREE_OPERAND (obj, 0);
/* Determine the dynamic-type binfo of OBJ; the NULL check on BINFO
   before the devirtualization below is elided in this excerpt.  */
1413 binfo = gimple_get_relevant_ref_binfo (obj, known_binfo);
1416 HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
1417 return gimple_fold_obj_type_ref_known_binfo (token, binfo);
1423 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1424 The statement may be replaced by another statement, e.g., if the call
1425 simplifies to a constant value. Return true if any changes were made.
1426 It is assumed that the operands have been previously folded. */
1429 fold_gimple_call (gimple_stmt_iterator *gsi)
1431 gimple stmt = gsi_stmt (*gsi);
1433 tree callee = gimple_call_fndecl (stmt);
1435 /* Check for builtins that CCP can handle using information not
1436 available in the generic fold routines. */
1437 if (callee && DECL_BUILT_IN (callee))
1439 tree result = gimple_fold_builtin (stmt);
/* Prefer the cheap in-place replacement; fall back to regimplifying
   the folded tree when it is not a valid gimple call result.  */
1443 if (!update_call_from_tree (gsi, result))
1444 gimplify_and_update_call_from_tree (gsi, result);
1450 /* ??? Should perhaps do this in fold proper. However, doing it
1451 there requires that we create a new CALL_EXPR, and that requires
1452 copying EH region info to the new node. Easier to just do it
1453 here where we can just smash the call operand. */
1454 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
/* Attempt devirtualization of an OBJ_TYPE_REF call whose object is a
   known address; on success smash the call fn in place.  */
1455 callee = gimple_call_fn (stmt);
1456 if (TREE_CODE (callee) == OBJ_TYPE_REF
1457 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR)
1461 t = gimple_fold_obj_type_ref (callee, NULL_TREE);
1464 gimple_call_set_fn (stmt, t);
1473 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1474 distinguishes both cases. */
1477 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1479 bool changed = false;
1480 gimple stmt = gsi_stmt (*gsi);
1483 /* Fold the main computation performed by the statement. */
1484 switch (gimple_code (stmt))
/* GIMPLE_ASSIGN case (label elided): fold the RHS and substitute it
   back if that is possible in place (or INPLACE is not requested).  */
1488 unsigned old_num_ops = gimple_num_ops (stmt);
1489 tree new_rhs = fold_gimple_assign (gsi);
1490 tree lhs = gimple_assign_lhs (stmt);
/* Folding may have produced a differently-typed value; convert back
   to the LHS type so the assignment stays type-correct.  */
1492 && !useless_type_conversion_p (TREE_TYPE (lhs),
1493 TREE_TYPE (new_rhs)))
1494 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
/* For in-place folding the new RHS must not need more operand slots
   than the statement already has.  */
1497 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1499 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1506 changed |= fold_gimple_cond (stmt);
1510 /* Fold *& in call arguments. */
1511 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1512 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1514 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1517 gimple_call_set_arg (stmt, i, tmp);
1521 /* The entire statement may be replaced in this case. */
1523 changed |= fold_gimple_call (gsi);
1527 /* Fold *& in asm operands. */
1528 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1530 tree link = gimple_asm_output_op (stmt, i);
1531 tree op = TREE_VALUE (link);
/* Outputs are lvalues, hence maybe_fold_reference with is_lhs=true.  */
1532 if (REFERENCE_CLASS_P (op)
1533 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1535 TREE_VALUE (link) = op;
1539 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1541 tree link = gimple_asm_input_op (stmt, i);
1542 tree op = TREE_VALUE (link);
1543 if (REFERENCE_CLASS_P (op)
1544 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
1546 TREE_VALUE (link) = op;
/* GIMPLE_DEBUG case (label elided): fold *& in debug bind values.  */
1553 if (gimple_debug_bind_p (stmt))
1555 tree val = gimple_debug_bind_get_value (stmt);
1557 && REFERENCE_CLASS_P (val))
1559 tree tem = maybe_fold_reference (val, false);
1562 gimple_debug_bind_set_value (stmt, tem);
/* The statement may have been replaced above; re-fetch it before
   touching the LHS.  */
1572 stmt = gsi_stmt (*gsi);
1574 /* Fold *& on the lhs. */
1575 if (gimple_has_lhs (stmt))
1577 tree lhs = gimple_get_lhs (stmt);
1578 if (lhs && REFERENCE_CLASS_P (lhs))
1580 tree new_lhs = maybe_fold_reference (lhs, true);
1583 gimple_set_lhs (stmt, new_lhs);
1592 /* Fold the statement pointed to by GSI. In some cases, this function may
1593 replace the whole statement with a new one. Returns true iff folding
1595 The statement pointed to by GSI should be in valid gimple form but may
1596 be in unfolded state as resulting from for example constant propagation
1597 which can produce *&x = 0. */
1600 fold_stmt (gimple_stmt_iterator *gsi)
/* Full folding: the statement at GSI may be replaced wholesale.  */
1602 return fold_stmt_1 (gsi, false);
1605 /* Perform the minimal folding on statement STMT. Only operations like
1606 *&x created by constant propagation are handled. The statement cannot
1607 be replaced with a new one. Return true if the statement was
1608 changed, false otherwise.
1609 The statement STMT should be in valid gimple form but may
1610 be in unfolded state as resulting from for example constant propagation
1611 which can produce *&x = 0. */
1614 fold_stmt_inplace (gimple stmt)
1616 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1617 bool changed = fold_stmt_1 (&gsi, true);
/* In-place folding must never swap the statement for a new one.  */
1618 gcc_assert (gsi_stmt (gsi) == stmt);
1622 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1623 if EXPR is null or we don't know how.
1624 If non-null, the result always has boolean type. */
1627 canonicalize_bool (tree expr, bool invert)
/* Inverted branch (the `if (invert)` guard is elided in this excerpt):
   produce the logical negation of EXPR with boolean type.  */
1633 if (integer_nonzerop (expr))
1634 return boolean_false_node;
1635 else if (integer_zerop (expr))
1636 return boolean_true_node;
/* !name is canonicalized as (name == 0).  */
1637 else if (TREE_CODE (expr) == SSA_NAME)
1638 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1639 build_int_cst (TREE_TYPE (expr), 0));
/* Invert a comparison by inverting its code (honest_math=false: no
   floating-point trapping concerns assumed at this point).  */
1640 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1641 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1643 TREE_OPERAND (expr, 0),
1644 TREE_OPERAND (expr, 1));
/* Non-inverted branch: already boolean-typed values pass through as
   canonical true/false/comparison forms.  */
1650 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1652 if (integer_nonzerop (expr))
1653 return boolean_true_node;
1654 else if (integer_zerop (expr))
1655 return boolean_false_node;
/* name is canonicalized as (name != 0).  */
1656 else if (TREE_CODE (expr) == SSA_NAME)
1657 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1658 build_int_cst (TREE_TYPE (expr), 0));
1659 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1660 return fold_build2 (TREE_CODE (expr),
1662 TREE_OPERAND (expr, 0),
1663 TREE_OPERAND (expr, 1));
1669 /* Check to see if a boolean expression EXPR is logically equivalent to the
1670 comparison (OP1 CODE OP2). Check for various identities involving
1674 same_bool_comparison_p (const_tree expr, enum tree_code code,
1675 const_tree op1, const_tree op2)
1679 /* The obvious case. */
1680 if (TREE_CODE (expr) == code
1681 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1682 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0)
1685 /* Check for comparing (name, name != 0) and the case where expr
1686 is an SSA_NAME with a definition matching the comparison. */
1687 if (TREE_CODE (expr) == SSA_NAME
1688 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
/* A boolean name equals (name != 0) and differs from (name == 0);
   the nonzero/zero checks on op2 encode exactly that.  */
1690 if (operand_equal_p (expr, op1, 0))
1691 return ((code == NE_EXPR && integer_zerop (op2))
1692 || (code == EQ_EXPR && integer_nonzerop (op2)));
/* Otherwise look through the name's defining assignment.  */
1693 s = SSA_NAME_DEF_STMT (expr);
1694 if (is_gimple_assign (s)
1695 && gimple_assign_rhs_code (s) == code
1696 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1697 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0)
1701 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1702 of name is a comparison, recurse. */
1703 if (TREE_CODE (op1) == SSA_NAME
1704 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1706 s = SSA_NAME_DEF_STMT (op1);
1707 if (is_gimple_assign (s)
1708 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1710 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) / (name == 1): same polarity, recurse directly.  */
1711 if ((c == NE_EXPR && integer_zerop (op2))
1712 || (c == EQ_EXPR && integer_nonzerop (op2)))
1713 return same_bool_comparison_p (expr, c,
1714 gimple_assign_rhs1 (s),
1715 gimple_assign_rhs2 (s));
/* (name == 0) / (name != 1): inverted polarity, recurse with the
   inverted comparison code.  */
1716 if ((c == EQ_EXPR && integer_zerop (op2))
1717 || (c == NE_EXPR && integer_nonzerop (op2)))
1718 return same_bool_comparison_p (expr,
1719 invert_tree_comparison (c, false),
1720 gimple_assign_rhs1 (s),
1721 gimple_assign_rhs2 (s));
1727 /* Check to see if two boolean expressions OP1 and OP2 are logically
1731 same_bool_result_p (const_tree op1, const_tree op2)
1733 /* Simple cases first. */
1734 if (operand_equal_p (op1, op2, 0)
1737 /* Check the cases where at least one of the operands is a comparison.
1738 These are a bit smarter than operand_equal_p in that they apply some
1739 identifies on SSA_NAMEs. */
/* Try each orientation: op2 as the comparison against op1...  */
1740 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1741 && same_bool_comparison_p (op1, TREE_CODE (op2),
1742 TREE_OPERAND (op2, 0),
1743 TREE_OPERAND (op2, 1)))
/* ...and op1 as the comparison against op2.  */
1745 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1746 && same_bool_comparison_p (op2, TREE_CODE (op1),
1747 TREE_OPERAND (op1, 0),
1748 TREE_OPERAND (op1, 1)))
1755 /* Forward declarations for some mutually recursive functions. */
1758 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1759 enum tree_code code2, tree op2a, tree op2b);
1761 and_var_with_comparison (tree var, bool invert,
1762 enum tree_code code2, tree op2a, tree op2b);
1764 and_var_with_comparison_1 (gimple stmt,
1765 enum tree_code code2, tree op2a, tree op2b);
1767 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1768 enum tree_code code2, tree op2a, tree op2b);
1770 or_var_with_comparison (tree var, bool invert,
1771 enum tree_code code2, tree op2a, tree op2b);
1773 or_var_with_comparison_1 (gimple stmt,
1774 enum tree_code code2, tree op2a, tree op2b);
1776 /* Helper function for and_comparisons_1: try to simplify the AND of the
1777 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1778 If INVERT is true, invert the value of the VAR before doing the AND.
1779 Return NULL_EXPR if we can't simplify this to a single expression. */
1782 and_var_with_comparison (tree var, bool invert,
1783 enum tree_code code2, tree op2a, tree op2b)
1786 gimple stmt = SSA_NAME_DEF_STMT (var);
1788 /* We can only deal with variables whose definitions are assignments. */
1789 if (!is_gimple_assign (stmt))
1792 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1793 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1794 Then we only have to consider the simpler non-inverted cases. */
/* The INVERT guard for this branch is elided in the excerpt.  */
1796 t = or_var_with_comparison_1 (stmt,
1797 invert_tree_comparison (code2, false),
1800 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool applies the outer negation (from DeMorgan) and
   guarantees a boolean-typed result or NULL_TREE.  */
1801 return canonicalize_bool (t, invert);
1804 /* Try to simplify the AND of the ssa variable defined by the assignment
1805 STMT with the comparison specified by (OP2A CODE2 OP2B).
1806 Return NULL_EXPR if we can't simplify this to a single expression. */
1809 and_var_with_comparison_1 (gimple stmt,
1810 enum tree_code code2, tree op2a, tree op2b)
1812 tree var = gimple_assign_lhs (stmt);
1813 tree true_test_var = NULL_TREE;
1814 tree false_test_var = NULL_TREE;
1815 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1817 /* Check for identities like (var AND (var == 0)) => false. */
1818 if (TREE_CODE (op2a) == SSA_NAME
1819 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) / (op2a == 1) tests op2a for truth...  */
1821 if ((code2 == NE_EXPR && integer_zerop (op2b))
1822 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1824 true_test_var = op2a;
/* (var AND (var != 0)) => var; return elided in excerpt.  */
1825 if (var == true_test_var)
/* ...while (op2a == 0) / (op2a != 1) tests op2a for falsehood.  */
1828 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1829 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1831 false_test_var = op2a;
1832 if (var == false_test_var)
1833 return boolean_false_node;
1837 /* If the definition is a comparison, recurse on it. */
1838 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1840 tree t = and_comparisons_1 (innercode,
1841 gimple_assign_rhs1 (stmt),
1842 gimple_assign_rhs2 (stmt),
1850 /* If the definition is an AND or OR expression, we may be able to
1851 simplify by reassociating. */
1852 if (innercode == TRUTH_AND_EXPR
1853 || innercode == TRUTH_OR_EXPR
1854 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1855 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
1857 tree inner1 = gimple_assign_rhs1 (stmt);
1858 tree inner2 = gimple_assign_rhs2 (stmt);
1861 tree partial = NULL_TREE;
1862 bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);
1864 /* Check for boolean identities that don't require recursive examination
1866 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1867 inner1 AND (inner1 OR inner2) => inner1
1868 !inner1 AND (inner1 AND inner2) => false
1869 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1870 Likewise for similar cases involving inner2. */
1871 if (inner1 == true_test_var)
1872 return (is_and ? var : inner1);
1873 else if (inner2 == true_test_var)
1874 return (is_and ? var : inner2);
1875 else if (inner1 == false_test_var)
1877 ? boolean_false_node
1878 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1879 else if (inner2 == false_test_var)
1881 ? boolean_false_node
1882 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1884 /* Next, redistribute/reassociate the AND across the inner tests.
1885 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1886 if (TREE_CODE (inner1) == SSA_NAME
1887 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1888 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1889 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1890 gimple_assign_rhs1 (s),
1891 gimple_assign_rhs2 (s),
1892 code2, op2a, op2b)))
1894 /* Handle the AND case, where we are reassociating:
1895 (inner1 AND inner2) AND (op2a code2 op2b)
1897 If the partial result t is a constant, we win. Otherwise
1898 continue on to try reassociating with the other inner test. */
1901 if (integer_onep (t))
1903 else if (integer_zerop (t))
1904 return boolean_false_node;
1907 /* Handle the OR case, where we are redistributing:
1908 (inner1 OR inner2) AND (op2a code2 op2b)
1909 => (t OR (inner2 AND (op2a code2 op2b))) */
1912 if (integer_onep (t))
1913 return boolean_true_node;
1915 /* Save partial result for later. */
1920 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1921 if (TREE_CODE (inner2) == SSA_NAME
1922 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1923 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1924 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1925 gimple_assign_rhs1 (s),
1926 gimple_assign_rhs2 (s),
1927 code2, op2a, op2b)))
1929 /* Handle the AND case, where we are reassociating:
1930 (inner1 AND inner2) AND (op2a code2 op2b)
1931 => (inner1 AND t) */
1934 if (integer_onep (t))
1936 else if (integer_zerop (t))
1937 return boolean_false_node;
1940 /* Handle the OR case. where we are redistributing:
1941 (inner1 OR inner2) AND (op2a code2 op2b)
1942 => (t OR (inner1 AND (op2a code2 op2b)))
1943 => (t OR partial) */
1946 if (integer_onep (t))
1947 return boolean_true_node;
1950 /* We already got a simplification for the other
1951 operand to the redistributed OR expression. The
1952 interesting case is when at least one is false.
1953 Or, if both are the same, we can apply the identity
1955 if (integer_zerop (partial))
1957 else if (integer_zerop (t))
1959 else if (same_bool_result_p (t, partial))
1968 /* Try to simplify the AND of two comparisons defined by
1969 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1970 If this can be done without constructing an intermediate value,
1971 return the resulting tree; otherwise NULL_TREE is returned.
1972 This function is deliberately asymmetric as it recurses on SSA_DEFs
1973 in the first comparison but not the second. */
1976 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1977 enum tree_code code2, tree op2a, tree op2b)
1979 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1980 if (operand_equal_p (op1a, op2a, 0)
1981 && operand_equal_p (op1b, op2b, 0)
1983 tree t = combine_comparisons (UNKNOWN_LOCATION,
1984 TRUTH_ANDIF_EXPR, code1, code2,
1985 boolean_type_node, op1a, op1b);
1990 /* Likewise the swapped case of the above. */
1991 if (operand_equal_p (op1a, op2b, 0)
1992 && operand_equal_p (op1b, op2a, 0)
1994 tree t = combine_comparisons (UNKNOWN_LOCATION,
1995 TRUTH_ANDIF_EXPR, code1,
1996 swap_tree_comparison (code2),
1997 boolean_type_node, op1a, op1b);
2002 /* If both comparisons are of the same value against constants, we might
2003 be able to merge them. */
2004 if (operand_equal_p (op1a, op2a, 0)
2005 && TREE_CODE (op1b) == INTEGER_CST
2006 && TREE_CODE (op2b) == INTEGER_CST)
/* CMP orders the two constants; every case below is phrased in terms
   of this single three-way comparison.  */
2008 int cmp = tree_int_cst_compare (op1b, op2b);
2010 /* If we have (op1a == op1b), we should either be able to
2011 return that or FALSE, depending on whether the constant op1b
2012 also satisfies the other comparison against op2b. */
2013 if (code1 == EQ_EXPR)
/* VAL = does (x == op1b) imply (x code2 op2b)?  The switch header
   and `bool done/val` declarations are elided in this excerpt.  */
2019 case EQ_EXPR: val = (cmp == 0); break;
2020 case NE_EXPR: val = (cmp != 0); break;
2021 case LT_EXPR: val = (cmp < 0); break;
2022 case GT_EXPR: val = (cmp > 0); break;
2023 case LE_EXPR: val = (cmp <= 0); break;
2024 case GE_EXPR: val = (cmp >= 0); break;
2025 default: done = false;
2030 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2032 return boolean_false_node;
2035 /* Likewise if the second comparison is an == comparison. */
2036 else if (code2 == EQ_EXPR)
2042 case EQ_EXPR: val = (cmp == 0); break;
2043 case NE_EXPR: val = (cmp != 0); break;
/* Note the reversed inequalities: here CMP compares op1b to op2b but
   the fixed point is op2b.  */
2044 case LT_EXPR: val = (cmp > 0); break;
2045 case GT_EXPR: val = (cmp < 0); break;
2046 case LE_EXPR: val = (cmp >= 0); break;
2047 case GE_EXPR: val = (cmp <= 0); break;
2048 default: done = false;
2053 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2055 return boolean_false_node;
2059 /* Same business with inequality tests. */
2060 else if (code1 == NE_EXPR)
2065 case EQ_EXPR: val = (cmp != 0); break;
2066 case NE_EXPR: val = (cmp == 0); break;
2067 case LT_EXPR: val = (cmp >= 0); break;
2068 case GT_EXPR: val = (cmp <= 0); break;
2069 case LE_EXPR: val = (cmp > 0); break;
2070 case GE_EXPR: val = (cmp < 0); break;
/* When VAL holds, (x != op1b) is implied by (x code2 op2b), so the
   AND reduces to the second comparison alone.  */
2075 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2077 else if (code2 == NE_EXPR)
2082 case EQ_EXPR: val = (cmp == 0); break;
2083 case NE_EXPR: val = (cmp != 0); break;
2084 case LT_EXPR: val = (cmp <= 0); break;
2085 case GT_EXPR: val = (cmp >= 0); break;
2086 case LE_EXPR: val = (cmp < 0); break;
2087 case GE_EXPR: val = (cmp > 0); break;
2092 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2095 /* Chose the more restrictive of two < or <= comparisons. */
2096 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2097 && (code2 == LT_EXPR || code2 == LE_EXPR))
2099 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2100 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2102 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2105 /* Likewise chose the more restrictive of two > or >= comparisons. */
2106 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2107 && (code2 == GT_EXPR || code2 == GE_EXPR))
2109 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2110 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2112 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2115 /* Check for singleton ranges. */
/* (x <= c AND x >= c) collapses to (x == c); the cmp==0 guard for this
   branch is elided in the excerpt.  */
2117 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2118 || (code1 == GE_EXPR && code2 == LE_EXPR)))
2119 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2121 /* Check for disjoint ranges. */
2123 && (code1 == LT_EXPR || code1 == LE_EXPR)
2124 && (code2 == GT_EXPR || code2 == GE_EXPR))
2125 return boolean_false_node;
2127 && (code1 == GT_EXPR || code1 == GE_EXPR)
2128 && (code2 == LT_EXPR || code2 == LE_EXPR))
2129 return boolean_false_node;
2132 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2133 NAME's definition is a truth value. See if there are any simplifications
2134 that can be done against the NAME's definition. */
2135 if (TREE_CODE (op1a) == SSA_NAME
2136 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2137 && (integer_zerop (op1b) || integer_onep (op1b)))
2139 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2140 || (code1 == NE_EXPR && integer_onep (op1b)));
2141 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2142 switch (gimple_code (stmt))
2145 /* Try to simplify by copy-propagating the definition. */
2146 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2149 /* If every argument to the PHI produces the same result when
2150 ANDed with the second comparison, we win.
2151 Do not do this unless the type is bool since we need a bool
2152 result here anyway. */
2153 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2155 tree result = NULL_TREE;
2157 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2159 tree arg = gimple_phi_arg_def (stmt, i);
2161 /* If this PHI has itself as an argument, ignore it.
2162 If all the other args produce the same result,
2164 if (arg == gimple_phi_result (stmt))
2166 else if (TREE_CODE (arg) == INTEGER_CST)
/* A constant-false arg (after INVERT) forces the AND to false for
   that incoming edge; all edges must agree on the final result.  */
2168 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2171 result = boolean_false_node;
2172 else if (!integer_zerop (result))
2176 result = fold_build2 (code2, boolean_type_node,
2178 else if (!same_bool_comparison_p (result,
2182 else if (TREE_CODE (arg) == SSA_NAME)
2184 tree temp = and_var_with_comparison (arg, invert,
2190 else if (!same_bool_result_p (result, temp))
2206 /* Try to simplify the AND of two comparisons, specified by
2207 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2208 If this can be simplified to a single expression (without requiring
2209 introducing more SSA variables to hold intermediate values),
2210 return the resulting tree. Otherwise return NULL_TREE.
2211 If the result expression is non-null, it has boolean type. */
2214 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2215 enum tree_code code2, tree op2a, tree op2b)
/* and_comparisons_1 is asymmetric (it only chases SSA defs of the
   first comparison), so try both argument orders.  */
2217 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2221 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2224 /* Helper function for or_comparisons_1: try to simplify the OR of the
2225 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2226 If INVERT is true, invert the value of VAR before doing the OR.
2227 Return NULL_EXPR if we can't simplify this to a single expression. */
2230 or_var_with_comparison (tree var, bool invert,
2231 enum tree_code code2, tree op2a, tree op2b)
2234 gimple stmt = SSA_NAME_DEF_STMT (var);
2236 /* We can only deal with variables whose definitions are assignments. */
2237 if (!is_gimple_assign (stmt))
2240 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2241 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2242 Then we only have to consider the simpler non-inverted cases. */
/* The INVERT guard for this branch is elided in the excerpt.  */
2244 t = and_var_with_comparison_1 (stmt,
2245 invert_tree_comparison (code2, false),
2248 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool applies the outer negation (from DeMorgan) and
   guarantees a boolean-typed result or NULL_TREE.  */
2249 return canonicalize_bool (t, invert);
2252 /* Try to simplify the OR of the ssa variable defined by the assignment
2253 STMT with the comparison specified by (OP2A CODE2 OP2B).
2254 Return NULL_EXPR if we can't simplify this to a single expression. */
2257 or_var_with_comparison_1 (gimple stmt,
2258 enum tree_code code2, tree op2a, tree op2b)
2260 tree var = gimple_assign_lhs (stmt);
2261 tree true_test_var = NULL_TREE;
2262 tree false_test_var = NULL_TREE;
2263 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2265 /* Check for identities like (var OR (var != 0)) => true . */
2266 if (TREE_CODE (op2a) == SSA_NAME
2267 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) / (op2a == 1) tests op2a for truth...  */
2269 if ((code2 == NE_EXPR && integer_zerop (op2b))
2270 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2272 true_test_var = op2a;
/* (var OR (var != 0)) => var; return elided in excerpt.  */
2273 if (var == true_test_var)
/* ...while (op2a == 0) / (op2a != 1) tests it for falsehood.  */
2276 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2277 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2279 false_test_var = op2a;
2280 if (var == false_test_var)
2281 return boolean_true_node;
2285 /* If the definition is a comparison, recurse on it. */
2286 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2288 tree t = or_comparisons_1 (innercode,
2289 gimple_assign_rhs1 (stmt),
2290 gimple_assign_rhs2 (stmt),
2298 /* If the definition is an AND or OR expression, we may be able to
2299 simplify by reassociating. */
2300 if (innercode == TRUTH_AND_EXPR
2301 || innercode == TRUTH_OR_EXPR
2302 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2303 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2305 tree inner1 = gimple_assign_rhs1 (stmt);
2306 tree inner2 = gimple_assign_rhs2 (stmt);
2309 tree partial = NULL_TREE;
2310 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2312 /* Check for boolean identities that don't require recursive examination
2314 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2315 inner1 OR (inner1 AND inner2) => inner1
2316 !inner1 OR (inner1 OR inner2) => true
2317 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2319 if (inner1 == true_test_var)
2320 return (is_or ? var : inner1);
2321 else if (inner2 == true_test_var)
2322 return (is_or ? var : inner2);
2323 else if (inner1 == false_test_var)
2326 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2327 else if (inner2 == false_test_var)
2330 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2332 /* Next, redistribute/reassociate the OR across the inner tests.
2333 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2334 if (TREE_CODE (inner1) == SSA_NAME
2335 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2336 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2337 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2338 gimple_assign_rhs1 (s),
2339 gimple_assign_rhs2 (s),
2340 code2, op2a, op2b)))
2342 /* Handle the OR case, where we are reassociating:
2343 (inner1 OR inner2) OR (op2a code2 op2b)
2345 If the partial result t is a constant, we win. Otherwise
2346 continue on to try reassociating with the other inner test. */
2347 if (innercode == TRUTH_OR_EXPR)
2349 if (integer_onep (t))
2350 return boolean_true_node;
2351 else if (integer_zerop (t))
2355 /* Handle the AND case, where we are redistributing:
2356 (inner1 AND inner2) OR (op2a code2 op2b)
2357 => (t AND (inner2 OR (op2a code op2b))) */
2360 if (integer_zerop (t))
2361 return boolean_false_node;
2363 /* Save partial result for later. */
2368 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2369 if (TREE_CODE (inner2) == SSA_NAME
2370 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2371 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2372 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2373 gimple_assign_rhs1 (s),
2374 gimple_assign_rhs2 (s),
2375 code2, op2a, op2b)))
2377 /* Handle the OR case, where we are reassociating:
2378 (inner1 OR inner2) OR (op2a code2 op2b)
2380 if (innercode == TRUTH_OR_EXPR)
2382 if (integer_zerop (t))
2384 else if (integer_onep (t))
2385 return boolean_true_node;
2388 /* Handle the AND case, where we are redistributing:
2389 (inner1 AND inner2) OR (op2a code2 op2b)
2390 => (t AND (inner1 OR (op2a code2 op2b)))
2391 => (t AND partial) */
2394 if (integer_zerop (t))
2395 return boolean_false_node;
2398 /* We already got a simplification for the other
2399 operand to the redistributed AND expression. The
2400 interesting case is when at least one is true.
2401 Or, if both are the same, we can apply the identity
2402 (x AND x) == true. */
2403 if (integer_onep (partial))
2405 else if (integer_onep (t))
2407 else if (same_bool_result_p (t, partial))
2408 return boolean_true_node;
2416 /* Try to simplify the OR of two comparisons defined by
2417 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2418 If this can be done without constructing an intermediate value,
2419 return the resulting tree; otherwise NULL_TREE is returned.
2420 This function is deliberately asymmetric as it recurses on SSA_DEFs
2421 in the first comparison but not the second. */
/* NOTE(review): this chunk is an elided listing.  The leading 24xx
   numbers are line numbers from the original file; wherever they jump
   (e.g. 2425 -> 2427) the intervening lines -- the return type,
   braces, `switch' headers, and declarations of locals such as VAL,
   DONE and I -- are missing.  Restore from the complete source before
   compiling; the comments below only annotate what is visible.  */
2424 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2425 enum tree_code code2, tree op2a, tree op2b)
2427 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2428 if (operand_equal_p (op1a, op2a, 0)
2429 && operand_equal_p (op1b, op2b, 0))
2431 tree t = combine_comparisons (UNKNOWN_LOCATION,
2432 TRUTH_ORIF_EXPR, code1, code2,
2433 boolean_type_node, op1a, op1b);
2438 /* Likewise the swapped case of the above. */
2439 if (operand_equal_p (op1a, op2b, 0)
2440 && operand_equal_p (op1b, op2a, 0))
2442 tree t = combine_comparisons (UNKNOWN_LOCATION,
2443 TRUTH_ORIF_EXPR, code1,
2444 swap_tree_comparison (code2),
2445 boolean_type_node, op1a, op1b);
2450 /* If both comparisons are of the same value against constants, we might
2451 be able to merge them. */
2452 if (operand_equal_p (op1a, op2a, 0)
2453 && TREE_CODE (op1b) == INTEGER_CST
2454 && TREE_CODE (op2b) == INTEGER_CST)
/* CMP orders the two constants: negative iff op1b < op2b, zero iff
   equal, positive iff op1b > op2b.  All the case analysis below keys
   off this single comparison.  */
2456 int cmp = tree_int_cst_compare (op1b, op2b);
2458 /* If we have (op1a != op1b), we should either be able to
2459 return that or TRUE, depending on whether the constant op1b
2460 also satisfies the other comparison against op2b. */
2461 if (code1 == NE_EXPR)
/* VAL answers: does the constant op1b satisfy (op1b CODE2 op2b)?  */
2467 case EQ_EXPR: val = (cmp == 0); break;
2468 case NE_EXPR: val = (cmp != 0); break;
2469 case LT_EXPR: val = (cmp < 0); break;
2470 case GT_EXPR: val = (cmp > 0); break;
2471 case LE_EXPR: val = (cmp <= 0); break;
2472 case GE_EXPR: val = (cmp >= 0); break;
2473 default: done = false;
2478 return boolean_true_node;
2480 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2483 /* Likewise if the second comparison is a != comparison. */
2484 else if (code2 == NE_EXPR)
/* Here the constant under test is OP2B, but CMP was computed as
   op1b vs op2b -- hence the swapped senses for LT/GT/LE/GE below.  */
2490 case EQ_EXPR: val = (cmp == 0); break;
2491 case NE_EXPR: val = (cmp != 0); break;
2492 case LT_EXPR: val = (cmp > 0); break;
2493 case GT_EXPR: val = (cmp < 0); break;
2494 case LE_EXPR: val = (cmp >= 0); break;
2495 case GE_EXPR: val = (cmp <= 0); break;
2496 default: done = false;
2501 return boolean_true_node;
2503 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2507 /* See if an equality test is redundant with the other comparison. */
2508 else if (code1 == EQ_EXPR)
2513 case EQ_EXPR: val = (cmp == 0); break;
2514 case NE_EXPR: val = (cmp != 0); break;
2515 case LT_EXPR: val = (cmp < 0); break;
2516 case GT_EXPR: val = (cmp > 0); break;
2517 case LE_EXPR: val = (cmp <= 0); break;
2518 case GE_EXPR: val = (cmp >= 0); break;
/* When (op1a == op1b) implies (op1a CODE2 op2b), the second
   comparison alone is equivalent to the whole OR.  */
2523 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2525 else if (code2 == EQ_EXPR)
2530 case EQ_EXPR: val = (cmp == 0); break;
2531 case NE_EXPR: val = (cmp != 0); break;
2532 case LT_EXPR: val = (cmp > 0); break;
2533 case GT_EXPR: val = (cmp < 0); break;
2534 case LE_EXPR: val = (cmp >= 0); break;
2535 case GE_EXPR: val = (cmp <= 0); break;
2540 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2543 /* Choose the less restrictive of two < or <= comparisons. */
2544 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2545 && (code2 == LT_EXPR || code2 == LE_EXPR))
2547 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2548 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2550 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2553 /* Likewise choose the less restrictive of two > or >= comparisons. */
2554 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2555 && (code2 == GT_EXPR || code2 == GE_EXPR))
2557 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2558 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2560 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2563 /* Check for singleton ranges. */
/* NOTE(review): the elided guard on original line 2564 presumably
   requires cmp == 0 (same constant on both sides) -- confirm against
   the full source.  */
2565 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2566 || (code1 == GT_EXPR && code2 == LT_EXPR))
2567 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2569 /* Check for less/greater pairs that don't restrict the range at all. */
2571 && (code1 == LT_EXPR || code1 == LE_EXPR)
2572 && (code2 == GT_EXPR || code2 == GE_EXPR))
2573 return boolean_true_node;
2575 && (code1 == GT_EXPR || code1 == GE_EXPR)
2576 && (code2 == LT_EXPR || code2 == LE_EXPR))
2577 return boolean_true_node;
2580 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2581 NAME's definition is a truth value. See if there are any simplifications
2582 that can be done against the NAME's definition. */
2583 if (TREE_CODE (op1a) == SSA_NAME
2584 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2585 && (integer_zerop (op1b) || integer_onep (op1b)))
/* INVERT is true exactly when the first comparison tests that NAME is
   false: (NAME == 0) or (NAME != 1).  */
2587 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2588 || (code1 == NE_EXPR && integer_onep (op1b)));
2589 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2590 switch (gimple_code (stmt))
2593 /* Try to simplify by copy-propagating the definition. */
2594 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2597 /* If every argument to the PHI produces the same result when
2598 ORed with the second comparison, we win.
2599 Do not do this unless the type is bool since we need a bool
2600 result here anyway. */
2601 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2603 tree result = NULL_TREE;
2605 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2607 tree arg = gimple_phi_arg_def (stmt, i);
2609 /* If this PHI has itself as an argument, ignore it.
2610 If all the other args produce the same result,
2612 if (arg == gimple_phi_result (stmt))
2614 else if (TREE_CODE (arg) == INTEGER_CST)
/* A constant PHI arg that already satisfies (arg code1 op1b) forces
   the OR to true on that edge; otherwise the second comparison alone
   decides, so fold it directly.  */
2616 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2619 result = boolean_true_node;
2620 else if (!integer_onep (result))
2624 result = fold_build2 (code2, boolean_type_node,
2626 else if (!same_bool_comparison_p (result,
2630 else if (TREE_CODE (arg) == SSA_NAME)
/* Recurse on an SSA arg; all args must agree on RESULT or we bail.  */
2632 tree temp = or_var_with_comparison (arg, invert,
2638 else if (!same_bool_result_p (result, temp))
2654 /* Try to simplify the OR of two comparisons, specified by
2655 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2656 If this can be simplified to a single expression (without requiring
2657 introducing more SSA variables to hold intermediate values),
2658 return the resulting tree. Otherwise return NULL_TREE.
2659 If the result expression is non-null, it has boolean type. */
/* NOTE(review): elided listing -- the original return-type line, the
   opening brace, and lines 2666-2668 (presumably "if (t) return t;")
   are missing here; confirm against the complete source.  */
2662 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2663 enum tree_code code2, tree op2a, tree op2b)
2665 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
/* or_comparisons_1 is deliberately asymmetric (it recurses on SSA
   defs of the first comparison only), so retry with the two
   comparisons swapped to cover the symmetric cases.  */
2669 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);