1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
34 /* Return true when DECL can be referenced from current unit.
35 We can get declarations that are not possible to reference for
38 1) When analyzing C++ virtual tables.
39 C++ virtual tables do have known constructors even
40 when they are keyed to other compilation unit.
41 Those tables can contain pointers to methods and vars
42 in other units. Those methods have both STATIC and EXTERNAL
44 2) In WHOPR mode devirtualization might lead to reference
45 to method that was partitioned elsewhere.
46 In this case we have static VAR_DECL or FUNCTION_DECL
47 that has no corresponding callgraph/varpool node
49 3) COMDAT functions referred by external vtables that
50 we devirtualize only during final compilation stage.
51 At this time we already decided that we will not output
52 the function body and thus we can't reference the symbol
56 can_refer_decl_in_current_unit_p (tree decl)
58 struct varpool_node *vnode;
59 struct cgraph_node *node;
61 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
63 /* External flag is set, so we deal with C++ reference
64 to static object from other file. */
65 if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
66 && TREE_CODE (decl) == VAR_DECL)
68 /* Just be sure it is not big in frontend setting
69 flags incorrectly. Those variables should never
71 gcc_checking_assert (!(vnode = varpool_get_node (decl))
72 || !vnode->finalized);
75 /* When function is public, we always can introduce new reference.
76 Exception are the COMDAT functions where introducing a direct
77 reference imply need to include function body in the current unit. */
78 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
80 /* We are not at ltrans stage; so don't worry about WHOPR.
81 Also when still gimplifying all referred comdat functions will be
83 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
85 /* If we already output the function body, we are safe. */
86 if (TREE_ASM_WRITTEN (decl))
88 if (TREE_CODE (decl) == FUNCTION_DECL)
90 node = cgraph_get_node (decl);
91 /* Check that we still have function body and that we didn't take
92 the decision to eliminate offline copy of the function yet.
93 The second is important when devirtualization happens during final
94 compilation stage when making a new reference no longer makes callee
96 if (!node || !node->analyzed || node->global.inlined_to)
99 else if (TREE_CODE (decl) == VAR_DECL)
101 vnode = varpool_get_node (decl);
102 if (!vnode || !vnode->finalized)
108 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
109 acceptable form for is_gimple_min_invariant.
   NOTE(review): also refuses ADDR_EXPRs whose base decl cannot be referenced
   from the current unit (see can_refer_decl_in_current_unit_p). */
112 canonicalize_constructor_val (tree cval)
115 if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
117 tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
118 TREE_OPERAND (cval, 0),
119 TREE_OPERAND (cval, 1),
124 if (TREE_CODE (cval) == ADDR_EXPR)
126 tree base = get_base_address (TREE_OPERAND (cval, 0));
129 && (TREE_CODE (base) == VAR_DECL
130 || TREE_CODE (base) == FUNCTION_DECL)
131 && !can_refer_decl_in_current_unit_p (base))
   /* Register the base variable so the rest of the middle end knows
      about the new reference.  */
133 if (base && TREE_CODE (base) == VAR_DECL)
134 add_referenced_var (base);
135 /* We never have the chance to fixup types in global initializers
136 during gimplification. Do so here. */
137 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
138 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
143 /* If SYM is a constant variable with known value, return the value.
144 NULL_TREE is returned otherwise. */
147 get_symbol_constant_value (tree sym)
149 if (const_value_known_p (sym))
151 tree val = DECL_INITIAL (sym);
   /* Bring the initializer into a form acceptable as a GIMPLE invariant
      before handing it out.  */
154 val = canonicalize_constructor_val (val);
155 if (val && is_gimple_min_invariant (val))
160 /* Variables declared 'const' without an initializer
161 have zero as the initializer if they may not be
162 overridden at link or run time. */
164 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
165 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
166 return build_zero_cst (TREE_TYPE (sym));
173 /* Return true if we may propagate the address expression ADDR into the
174 dereference DEREF and cancel them. */
177 may_propagate_address_into_dereference (tree addr, tree deref)
179 gcc_assert (TREE_CODE (deref) == MEM_REF
180 && TREE_CODE (addr) == ADDR_EXPR);
182 /* Don't propagate if ADDR's operand has incomplete type. */
183 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
186 /* If the address is invariant then we do not need to preserve restrict
187 qualifications. But we do need to preserve volatile qualifiers until
188 we can annotate the folded dereference itself properly. */
189 if (is_gimple_min_invariant (addr)
190 && (!TREE_THIS_VOLATILE (deref)
191 || TYPE_VOLATILE (TREE_TYPE (addr))))
192 return useless_type_conversion_p (TREE_TYPE (deref),
193 TREE_TYPE (TREE_OPERAND (addr, 0)));
195 /* Else both the address substitution and the folding must result in
196 a valid useless type conversion sequence. */
197 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
199 && useless_type_conversion_p (TREE_TYPE (deref),
200 TREE_TYPE (TREE_OPERAND (addr, 0))));
204 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
205 BASE is an array type. OFFSET is a byte displacement.
207 LOC is the location of the original expression.
   Returns the folded ARRAY_REF on success; on failure the (elided)
   early exits presumably return NULL_TREE — confirm against full file. */
210 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
212 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
213 tree array_type, elt_type, elt_size;
216 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
217 measured in units of the size of elements type) from that ARRAY_REF).
218 We can't do anything if either is variable.
220 The case we handle here is *(&A[N]+O). */
221 if (TREE_CODE (base) == ARRAY_REF)
223 tree low_bound = array_ref_low_bound (base);
225 elt_offset = TREE_OPERAND (base, 1);
226 if (TREE_CODE (low_bound) != INTEGER_CST
227 || TREE_CODE (elt_offset) != INTEGER_CST)
   /* Un-bias the inner index by the array's low bound so it can later be
      re-added in the domain type.  */
230 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
231 base = TREE_OPERAND (base, 0);
234 /* Ignore stupid user tricks of indexing non-array variables. */
235 array_type = TREE_TYPE (base);
236 if (TREE_CODE (array_type) != ARRAY_TYPE)
238 elt_type = TREE_TYPE (array_type);
240 /* Use signed size type for intermediate computation on the index. */
241 idx_type = ssizetype;
243 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
244 element type (so we can use the alignment if it's not constant).
245 Otherwise, compute the offset as an index by using a division. If the
246 division isn't exact, then don't do anything. */
247 elt_size = TYPE_SIZE_UNIT (elt_type);
250 if (integer_zerop (offset))
252 if (TREE_CODE (elt_size) != INTEGER_CST)
253 elt_size = size_int (TYPE_ALIGN (elt_type));
255 idx = build_int_cst (idx_type, 0);
259 unsigned HOST_WIDE_INT lquo, lrem;
260 HOST_WIDE_INT hquo, hrem;
263 /* The final array offset should be signed, so we need
264 to sign-extend the (possibly pointer) offset here
265 and use signed division. */
266 soffset = double_int_sext (tree_to_double_int (offset),
267 TYPE_PRECISION (TREE_TYPE (offset)));
268 if (TREE_CODE (elt_size) != INTEGER_CST
269 || div_and_round_double (TRUNC_DIV_EXPR, 0,
270 soffset.low, soffset.high,
271 TREE_INT_CST_LOW (elt_size),
272 TREE_INT_CST_HIGH (elt_size),
273 &lquo, &hquo, &lrem, &hrem)
277 idx = build_int_cst_wide (idx_type, lquo, hquo);
280 /* Assume the low bound is zero. If there is a domain type, get the
281 low bound, if any, convert the index into that type, and add the
283 min_idx = build_int_cst (idx_type, 0);
284 domain_type = TYPE_DOMAIN (array_type);
287 idx_type = domain_type;
288 if (TYPE_MIN_VALUE (idx_type))
289 min_idx = TYPE_MIN_VALUE (idx_type);
291 min_idx = fold_convert (idx_type, min_idx);
293 if (TREE_CODE (min_idx) != INTEGER_CST)
296 elt_offset = fold_convert (idx_type, elt_offset);
299 if (!integer_zerop (min_idx))
300 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
301 if (!integer_zerop (elt_offset))
302 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
304 /* Make sure to possibly truncate late after offsetting. */
305 idx = fold_convert (idx_type, idx);
307 /* We don't want to construct access past array bounds. For example
310 should not be simplified into (*c)[14] or tree-vrp will
312 This is only an issue for multi-dimensional arrays. */
313 if (TREE_CODE (elt_type) == ARRAY_TYPE
316 if (TYPE_MAX_VALUE (domain_type)
317 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
318 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
320 else if (TYPE_MIN_VALUE (domain_type)
321 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
322 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
324 else if (compare_tree_int (idx, 0) < 0)
329 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
330 SET_EXPR_LOCATION (t, loc);
336 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
337 LOC is the location of original expression.
339 Before attempting the conversion strip off existing ADDR_EXPRs. */
342 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
348 if (TREE_CODE (base) != ADDR_EXPR)
351 base = TREE_OPERAND (base, 0);
   /* A zero offset onto a compatible object needs no array-ref folding.  */
352 if (types_compatible_p (orig_type, TREE_TYPE (base))
353 && integer_zerop (offset))
356 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
   /* Only accept the folded reference if it has the type the caller
      expects.  */
357 if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
362 /* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
363 LOC is the location of the original expression. */
366 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
372 if (TREE_CODE (addr) != ADDR_EXPR)
374 base = TREE_OPERAND (addr, 0);
375 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
   /* Re-take the address of the folded array reference and make sure the
      result converts trivially to the requested type.  */
378 ret = build_fold_addr_expr (ret);
379 if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
381 SET_EXPR_LOCATION (ret, loc);
388 /* A quaint feature extant in our address arithmetic is that there
389 can be hidden type changes here. The type of the result need
390 not be the same as the type of the input pointer.
392 What we're after here is an expression of the form
393 (T *)(&array + const)
394 where array is OP0, const is OP1, RES_TYPE is T and
395 the cast doesn't actually exist, but is implicit in the
396 type of the POINTER_PLUS_EXPR. We'd like to turn this into
398 which may be able to propagate further. */
401 maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
406 /* The first operand should be an ADDR_EXPR. */
407 if (TREE_CODE (op0) != ADDR_EXPR)
409 op0 = TREE_OPERAND (op0, 0);
411 /* It had better be a constant. */
412 if (TREE_CODE (op1) != INTEGER_CST)
414 /* Or op0 should now be A[0] and the non-constant offset defined
415 via a multiplication by the array element size. */
416 if (TREE_CODE (op0) == ARRAY_REF
417 /* As we will end up creating a variable index array access
418 in the outermost array dimension make sure there isn't
419 a more inner array that the index could overflow to. */
420 && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
421 && integer_zerop (TREE_OPERAND (op0, 1))
422 && TREE_CODE (op1) == SSA_NAME)
424 gimple offset_def = SSA_NAME_DEF_STMT (op1);
425 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
426 if (!host_integerp (elsz, 1)
427 || !is_gimple_assign (offset_def))
430 /* Do not build array references of something that we can't
431 see the true number of array dimensions for. */
432 if (!DECL_P (TREE_OPERAND (op0, 0))
433 && !handled_component_p (TREE_OPERAND (op0, 0)))
   /* offset = index * element-size: replace the zero index by INDEX.  */
436 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
437 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
438 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
439 return build_fold_addr_expr
440 (build4 (ARRAY_REF, TREE_TYPE (op0),
441 TREE_OPERAND (op0, 0),
442 gimple_assign_rhs1 (offset_def),
443 TREE_OPERAND (op0, 2),
444 TREE_OPERAND (op0, 3)));
   /* Element size 1: the byte offset itself is the index.  */
445 else if (integer_onep (elsz)
446 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
447 return build_fold_addr_expr
448 (build4 (ARRAY_REF, TREE_TYPE (op0),
449 TREE_OPERAND (op0, 0),
451 TREE_OPERAND (op0, 2),
452 TREE_OPERAND (op0, 3)));
   /* Same transform for &array (rather than &array[0]) plus a variable
      offset, restricted to one-dimensional arrays.  */
454 else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
456 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
457 && TREE_CODE (op1) == SSA_NAME)
459 gimple offset_def = SSA_NAME_DEF_STMT (op1);
460 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
461 if (!host_integerp (elsz, 1)
462 || !is_gimple_assign (offset_def))
465 /* Do not build array references of something that we can't
466 see the true number of array dimensions for. */
468 && !handled_component_p (op0))
471 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
472 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
473 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
474 return build_fold_addr_expr
475 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
476 op0, gimple_assign_rhs1 (offset_def),
477 integer_zero_node, NULL_TREE));
478 else if (integer_onep (elsz)
479 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
480 return build_fold_addr_expr
481 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
483 integer_zero_node, NULL_TREE));
489 /* If the first operand is an ARRAY_REF, expand it so that we can fold
490 the offset into it. */
491 while (TREE_CODE (op0) == ARRAY_REF)
493 tree array_obj = TREE_OPERAND (op0, 0);
494 tree array_idx = TREE_OPERAND (op0, 1);
495 tree elt_type = TREE_TYPE (op0);
496 tree elt_size = TYPE_SIZE_UNIT (elt_type);
   /* Both the index and the element size must be constant to fold.  */
499 if (TREE_CODE (array_idx) != INTEGER_CST)
501 if (TREE_CODE (elt_size) != INTEGER_CST)
504 /* Un-bias the index by the min index of the array type. */
505 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
508 min_idx = TYPE_MIN_VALUE (min_idx);
511 if (TREE_CODE (min_idx) != INTEGER_CST)
514 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
515 if (!integer_zerop (min_idx))
516 array_idx = int_const_binop (MINUS_EXPR, array_idx,
521 /* Convert the index to a byte offset. */
522 array_idx = fold_convert (sizetype, array_idx);
523 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
525 /* Update the operands for the next round, or for folding. */
526 op1 = int_const_binop (PLUS_EXPR,
531 ptd_type = TREE_TYPE (res_type);
532 /* If we want a pointer to void, reconstruct the reference from the
533 array element type. A pointer to that can be trivially converted
534 to void *. This happens as we fold (void *)(ptr p+ off). */
535 if (VOID_TYPE_P (ptd_type)
536 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
537 ptd_type = TREE_TYPE (TREE_TYPE (op0));
539 /* At which point we can try some of the same things as for indirects. */
540 t = maybe_fold_offset_to_array_ref (loc, op0, op1);
543 t = build_fold_addr_expr (t);
544 if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
546 SET_EXPR_LOCATION (t, loc);
552 /* Subroutine of fold_stmt. We perform several simplifications of the
553 memory reference tree EXPR and make sure to re-gimplify them properly
554 after propagation of constant addresses. IS_LHS is true if the
555 reference is supposed to be an lvalue.
   NOTE(review): recurses on itself after each successful local rewrite
   so follow-up simplifications are picked up in the same call.  */
558 maybe_fold_reference (tree expr, bool is_lhs)
564 && (result = fold_const_aggregate_ref (expr))
565 && is_gimple_min_invariant (result))
568 /* ??? We might want to open-code the relevant remaining cases
569 to avoid using the generic fold. */
570 if (handled_component_p (*t)
571 && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
573 tree tem = fold (*t);
   /* Walk down to the innermost base of the reference.  */
578 while (handled_component_p (*t))
579 t = &TREE_OPERAND (*t, 0);
581 /* Fold back MEM_REFs to reference trees. */
582 if (TREE_CODE (*t) == MEM_REF
583 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
584 && integer_zerop (TREE_OPERAND (*t, 1))
585 && (TREE_THIS_VOLATILE (*t)
586 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
587 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
588 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
589 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
590 /* We have to look out here to not drop a required conversion
591 from the rhs to the lhs if is_lhs, but we don't have the
592 rhs here to verify that. Thus require strict type
594 && types_compatible_p (TREE_TYPE (*t),
595 TREE_TYPE (TREE_OPERAND
596 (TREE_OPERAND (*t, 0), 0))))
599 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
600 tem = maybe_fold_reference (expr, is_lhs);
605 /* Canonicalize MEM_REFs invariant address operand. */
606 else if (TREE_CODE (*t) == MEM_REF
607 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
609 bool volatile_p = TREE_THIS_VOLATILE (*t);
610 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
611 TREE_OPERAND (*t, 0),
612 TREE_OPERAND (*t, 1));
   /* fold_binary does not preserve the volatile flag; restore it.  */
615 TREE_THIS_VOLATILE (tem) = volatile_p;
617 tem = maybe_fold_reference (expr, is_lhs);
623 else if (TREE_CODE (*t) == TARGET_MEM_REF)
625 tree tem = maybe_fold_tmr (*t);
629 tem = maybe_fold_reference (expr, is_lhs);
638 tree tem = get_symbol_constant_value (*t);
640 && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
642 *t = unshare_expr (tem);
643 tem = maybe_fold_reference (expr, is_lhs);
654 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
655 replacement rhs for the statement or NULL_TREE if no simplification
656 could be made. It is assumed that the operands have been previously
660 fold_gimple_assign (gimple_stmt_iterator *si)
662 gimple stmt = gsi_stmt (*si);
663 enum tree_code subcode = gimple_assign_rhs_code (stmt);
664 location_t loc = gimple_location (stmt);
666 tree result = NULL_TREE;
   /* Dispatch on the shape of the RHS: single operand, unary, binary or
      ternary operation.  */
668 switch (get_gimple_rhs_class (subcode))
670 case GIMPLE_SINGLE_RHS:
672 tree rhs = gimple_assign_rhs1 (stmt);
674 /* Try to fold a conditional expression. */
675 if (TREE_CODE (rhs) == COND_EXPR)
677 tree op0 = COND_EXPR_COND (rhs);
680 location_t cond_loc = EXPR_LOCATION (rhs);
682 if (COMPARISON_CLASS_P (op0))
   /* Defer overflow warnings while folding the comparison; only emit
      them if the folded form is actually usable.  */
684 fold_defer_overflow_warnings ();
685 tem = fold_binary_loc (cond_loc,
686 TREE_CODE (op0), TREE_TYPE (op0),
687 TREE_OPERAND (op0, 0),
688 TREE_OPERAND (op0, 1));
689 /* This is actually a conditional expression, not a GIMPLE
690 conditional statement, however, the valid_gimple_rhs_p
691 test still applies. */
692 set = (tem && is_gimple_condexpr (tem)
693 && valid_gimple_rhs_p (tem));
694 fold_undefer_overflow_warnings (set, stmt, 0);
696 else if (is_gimple_min_invariant (op0))
705 result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
706 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
709 else if (REFERENCE_CLASS_P (rhs))
710 return maybe_fold_reference (rhs, false);
712 else if (TREE_CODE (rhs) == ADDR_EXPR)
714 tree ref = TREE_OPERAND (rhs, 0);
715 tree tem = maybe_fold_reference (ref, true);
717 && TREE_CODE (tem) == MEM_REF
718 && integer_zerop (TREE_OPERAND (tem, 1)))
719 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
721 result = fold_convert (TREE_TYPE (rhs),
722 build_fold_addr_expr_loc (loc, tem));
   /* &*p with a zero offset folds back to p.  */
723 else if (TREE_CODE (ref) == MEM_REF
724 && integer_zerop (TREE_OPERAND (ref, 1)))
725 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
728 else if (TREE_CODE (rhs) == CONSTRUCTOR
729 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
730 && (CONSTRUCTOR_NELTS (rhs)
731 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
733 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
737 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
738 if (TREE_CODE (val) != INTEGER_CST
739 && TREE_CODE (val) != REAL_CST
740 && TREE_CODE (val) != FIXED_CST)
743 return build_vector_from_ctor (TREE_TYPE (rhs),
744 CONSTRUCTOR_ELTS (rhs));
747 else if (DECL_P (rhs))
748 return unshare_expr (get_symbol_constant_value (rhs));
750 /* If we couldn't fold the RHS, hand over to the generic
752 if (result == NULL_TREE)
755 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
756 that may have been added by fold, and "useless" type
757 conversions that might now be apparent due to propagation. */
758 STRIP_USELESS_TYPE_CONVERSION (result);
760 if (result != rhs && valid_gimple_rhs_p (result))
767 case GIMPLE_UNARY_RHS:
769 tree rhs = gimple_assign_rhs1 (stmt);
771 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
774 /* If the operation was a conversion do _not_ mark a
775 resulting constant with TREE_OVERFLOW if the original
776 constant was not. These conversions have implementation
777 defined behavior and retaining the TREE_OVERFLOW flag
778 here would confuse later passes such as VRP. */
779 if (CONVERT_EXPR_CODE_P (subcode)
780 && TREE_CODE (result) == INTEGER_CST
781 && TREE_CODE (rhs) == INTEGER_CST)
782 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
784 STRIP_USELESS_TYPE_CONVERSION (result);
785 if (valid_gimple_rhs_p (result))
   /* For pointer-to-pointer conversions try folding the zero-offset
      address form instead.  */
788 else if (CONVERT_EXPR_CODE_P (subcode)
789 && POINTER_TYPE_P (gimple_expr_type (stmt))
790 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
792 tree type = gimple_expr_type (stmt);
793 tree t = maybe_fold_offset_to_address (loc,
794 gimple_assign_rhs1 (stmt),
795 integer_zero_node, type);
802 case GIMPLE_BINARY_RHS:
803 /* Try to fold pointer addition. */
804 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
806 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
807 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
809 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
810 if (!useless_type_conversion_p
811 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
812 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
814 result = maybe_fold_stmt_addition (gimple_location (stmt),
816 gimple_assign_rhs1 (stmt),
817 gimple_assign_rhs2 (stmt));
821 result = fold_binary_loc (loc, subcode,
822 TREE_TYPE (gimple_assign_lhs (stmt)),
823 gimple_assign_rhs1 (stmt),
824 gimple_assign_rhs2 (stmt));
828 STRIP_USELESS_TYPE_CONVERSION (result);
829 if (valid_gimple_rhs_p (result))
832 /* Fold might have produced non-GIMPLE, so if we trust it blindly
833 we lose canonicalization opportunities. Do not go again
834 through fold here though, or the same non-GIMPLE will be
836 if (commutative_tree_code (subcode)
837 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
838 gimple_assign_rhs2 (stmt), false))
839 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
840 gimple_assign_rhs2 (stmt),
841 gimple_assign_rhs1 (stmt));
845 case GIMPLE_TERNARY_RHS:
846 result = fold_ternary_loc (loc, subcode,
847 TREE_TYPE (gimple_assign_lhs (stmt)),
848 gimple_assign_rhs1 (stmt),
849 gimple_assign_rhs2 (stmt),
850 gimple_assign_rhs3 (stmt));
854 STRIP_USELESS_TYPE_CONVERSION (result);
855 if (valid_gimple_rhs_p (result))
858 /* Fold might have produced non-GIMPLE, so if we trust it blindly
859 we lose canonicalization opportunities. Do not go again
860 through fold here though, or the same non-GIMPLE will be
862 if (commutative_ternary_tree_code (subcode)
863 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
864 gimple_assign_rhs2 (stmt), false))
865 return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
866 gimple_assign_rhs2 (stmt),
867 gimple_assign_rhs1 (stmt),
868 gimple_assign_rhs3 (stmt));
872 case GIMPLE_INVALID_RHS:
879 /* Attempt to fold a conditional statement. Return true if any changes were
880 made. We only attempt to fold the condition expression, and do not perform
881 any transformation that would require alteration of the cfg. It is
882 assumed that the operands have been previously folded. */
885 fold_gimple_cond (gimple stmt)
887 tree result = fold_binary_loc (gimple_location (stmt),
888 gimple_cond_code (stmt),
890 gimple_cond_lhs (stmt),
891 gimple_cond_rhs (stmt));
895 STRIP_USELESS_TYPE_CONVERSION (result);
   /* Only install the folded form if it is still a valid GIMPLE
      condition.  */
896 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
898 gimple_cond_set_condition_from_tree (stmt, result);
906 /* Convert EXPR into a GIMPLE value suitable for substitution on the
907 RHS of an assignment. Insert the necessary statements before
908 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
909 is replaced. If the call is expected to produce a result, then it
910 is replaced by an assignment of the new RHS to the result variable.
911 If the result is to be ignored, then the call is replaced by a
912 GIMPLE_NOP. A proper VDEF chain is retained by making the first
913 VUSE and the last VDEF of the whole sequence be the same as the replaced
914 statement and using new SSA names for stores in between. */
917 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
920 tree tmp = NULL_TREE; /* Silence warning. */
921 gimple stmt, new_stmt;
922 gimple_stmt_iterator i;
923 gimple_seq stmts = gimple_seq_alloc();
924 struct gimplify_ctx gctx;
926 gimple laststore = NULL;
929 stmt = gsi_stmt (*si_p);
931 gcc_assert (is_gimple_call (stmt));
933 lhs = gimple_call_lhs (stmt);
934 reaching_vuse = gimple_vuse (stmt);
936 push_gimplify_context (&gctx);
938 if (lhs == NULL_TREE)
940 gimplify_and_add (expr, &stmts);
941 /* We can end up with folding a memcpy of an empty class assignment
942 which gets optimized away by C++ gimplification. */
943 if (gimple_seq_empty_p (stmts))
945 if (gimple_in_ssa_p (cfun))
947 unlink_stmt_vdef (stmt);
950 gsi_remove (si_p, true);
955 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
957 pop_gimplify_context (NULL);
959 if (gimple_has_location (stmt))
960 annotate_all_with_location (stmts, gimple_location (stmt));
962 /* The replacement can expose previously unreferenced variables. */
963 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
967 gsi_insert_before (si_p, last, GSI_NEW_STMT);
970 new_stmt = gsi_stmt (i);
971 if (gimple_in_ssa_p (cfun))
973 find_new_referenced_vars (new_stmt);
974 mark_symbols_for_renaming (new_stmt);
976 /* If the new statement has a VUSE, update it with exact SSA name we
977 know will reach this one. */
978 if (gimple_vuse (new_stmt))
980 /* If we've also seen a previous store create a new VDEF for
981 the latter one, and make that the new reaching VUSE. */
984 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
985 gimple_set_vdef (laststore, reaching_vuse);
986 update_stmt (laststore);
989 gimple_set_vuse (new_stmt, reaching_vuse);
990 gimple_set_modified (new_stmt, true);
   /* Remember the last store of the sequence so its VDEF can be wired up
      once we know what follows it.  */
992 if (gimple_assign_single_p (new_stmt)
993 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
995 laststore = new_stmt;
1000 if (lhs == NULL_TREE)
1002 /* If we replace a call without LHS that has a VDEF and our new
1003 sequence ends with a store we must make that store have the same
1004 vdef in order not to break the sequencing. This can happen
1005 for instance when folding memcpy calls into assignments. */
1006 if (gimple_vdef (stmt) && laststore)
1008 gimple_set_vdef (laststore, gimple_vdef (stmt));
1009 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1010 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
1011 update_stmt (laststore);
1013 else if (gimple_in_ssa_p (cfun))
1015 unlink_stmt_vdef (stmt);
1016 release_defs (stmt);
1024 gsi_insert_before (si_p, last, GSI_NEW_STMT);
1027 if (laststore && is_gimple_reg (lhs))
1029 gimple_set_vdef (laststore, gimple_vdef (stmt));
1030 update_stmt (laststore);
1031 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1032 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
1037 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
1038 gimple_set_vdef (laststore, reaching_vuse);
1039 update_stmt (laststore);
   /* Finally replace the original call by LHS = TMP, carrying over the
      virtual operands of the call where needed.  */
1042 new_stmt = gimple_build_assign (lhs, tmp);
1043 if (!is_gimple_reg (tmp))
1044 gimple_set_vuse (new_stmt, reaching_vuse);
1045 if (!is_gimple_reg (lhs))
1047 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1048 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1049 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
1051 else if (reaching_vuse == gimple_vuse (stmt))
1052 unlink_stmt_vdef (stmt);
1055 gimple_set_location (new_stmt, gimple_location (stmt));
1056 gsi_replace (si_p, new_stmt, false);
1059 /* Return the string length, maximum string length or maximum value of
1061 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1062 is not NULL and, for TYPE == 0, its value is not equal to the length
1063 we determine or if we are unable to determine the length or value,
1064 return false. VISITED is a bitmap of visited variables.
1065 TYPE is 0 if string length should be returned, 1 for maximum string
1066 length and 2 for maximum value ARG can have. */
1069 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
1074 if (TREE_CODE (arg) != SSA_NAME)
   /* Both arms of a COND_EXPR must yield a consistent length.  */
1076 if (TREE_CODE (arg) == COND_EXPR)
1077 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
1078 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
1079 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1080 else if (TREE_CODE (arg) == ADDR_EXPR
1081 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1082 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1084 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1085 if (TREE_CODE (aop0) == INDIRECT_REF
1086 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1087 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1088 length, visited, type);
   /* Reject negative or non-constant candidate values.  */
1094 if (TREE_CODE (val) != INTEGER_CST
1095 || tree_int_cst_sgn (val) < 0)
1099 val = c_strlen (arg, 1);
1107 if (TREE_CODE (*length) != INTEGER_CST
1108 || TREE_CODE (val) != INTEGER_CST)
1111 if (tree_int_cst_lt (*length, val))
1115 else if (simple_cst_equal (val, *length) != 1)
1123 /* If we were already here, break the infinite cycle. */
1124 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1128 def_stmt = SSA_NAME_DEF_STMT (var);
1130 switch (gimple_code (def_stmt))
1133 /* The RHS of the statement defining VAR must either have a
1134 constant length or come from another SSA_NAME with a constant
1136 if (gimple_assign_single_p (def_stmt)
1137 || gimple_assign_unary_nop_p (def_stmt))
1139 tree rhs = gimple_assign_rhs1 (def_stmt);
1140 return get_maxval_strlen (rhs, length, visited, type);
1146 /* All the arguments of the PHI node must have the same constant
1150 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1152 tree arg = gimple_phi_arg (def_stmt, i)->def;
1154 /* If this PHI has itself as an argument, we cannot
1155 determine the string length of this argument. However,
1156 if we can find a constant string length for the other
1157 PHI args then we can still be sure that this is a
1158 constant string length. So be optimistic and just
1159 continue with the next argument. */
1160 if (arg == gimple_phi_result (def_stmt))
1163 if (!get_maxval_strlen (arg, length, visited, type))
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care. */
gimple_fold_builtin (gimple stmt)
  /* val[i] caches the maximal constant string length found for call
     argument i (filled in by get_maxval_strlen below), or NULL_TREE.  */
  tree result, val[3];
  location_t loc = gimple_location (stmt);
  gcc_assert (is_gimple_call (stmt));
  /* A call whose LHS is unused only matters for its side effects.  */
  ignore = (gimple_call_lhs (stmt) == NULL);
  /* First try the generic builtin folder.  If that succeeds, return the
  result = fold_call_stmt (stmt, ignore);
      STRIP_NOPS (result);
  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
  /* If the builtin could not be folded, and it has no argument list,
  nargs = gimple_call_num_args (stmt);
  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
  /* arg_idx selects the argument whose maximal string length we need;
     a call with too few arguments cannot be simplified here.  */
  if (arg_idx >= nargs)
  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);
  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;
  BITMAP_FREE (visited);
  /* Dispatch to the per-builtin folder, guarded in each case by the
     argument count and the validity of the cached string length.  */
  switch (DECL_FUNCTION_CODE (callee))
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
          fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new_val)
              || (CONVERT_EXPR_P (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (loc, callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (loc, callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
    case BUILT_IN_FPUTS:
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, false, val[0]);
    case BUILT_IN_FPUTS_UNLOCKED:
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, true, val[0]);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          DECL_FUNCTION_CODE (callee));
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          DECL_FUNCTION_CODE (callee));
    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
  /* If the call's value is unused, strip the parts of RESULT that only
     exist to produce a value.  */
  if (result && ignore)
    result = fold_ignored_result (result);
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  If a call to the function must be accompanied
   with a this adjustment, the constant which should be added to this pointer
   is stored to *DELTA.  If REFUSE_THUNKS is true, return NULL if the function
   is a thunk (other than a this adjustment which is dealt with by DELTA). */
/* NOTE(review): "mehtod" in this identifier is a typo for "method";
   renaming it would touch every caller, so it is only flagged here.  */
gimple_get_virt_mehtod_for_binfo (HOST_WIDE_INT token, tree known_binfo,
                                  tree *delta, bool refuse_thunks)
  struct cgraph_node *node;
  v = BINFO_VIRTUALS (known_binfo);
  /* If there are no virtual methods, leave the OBJ_TYPE_REF alone.  */
  /* Each vtable slot occupies several entries when the target uses
     function descriptors; step by that many per TOKEN.  */
  i += (TARGET_VTABLE_USES_DESCRIPTORS
        ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
  fndecl = TREE_VALUE (v);
  node = cgraph_get_node_or_alias (fndecl);
  /* Bail out if it is a thunk declaration.  Since simple this_adjusting
     thunks are represented by a constant in TREE_PURPOSE of items in
     BINFO_VIRTUALS, this is a more complicated type which we cannot handle as
     FIXME: Remove the following condition once we are able to represent
     thunk information on call graph edges.  */
      || (node->same_body_alias && node->thunk.thunk_p)))
  /* When cgraph node is missing and function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in other partition, because we found devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v)))
  /* The this-pointer adjustment for the found slot; must fit a
     host integer.  */
  *delta = TREE_PURPOSE (v);
  gcc_checking_assert (host_integerp (*delta, 0));
/* Generate code adjusting the first parameter of a call statement determined
gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
  gimple call_stmt = gsi_stmt (*gsi);
  /* DELTA is added to the `this' pointer, so it must be in sizetype
     for POINTER_PLUS_EXPR.  */
  delta = fold_convert (sizetype, delta);
  gcc_assert (gimple_call_num_args (call_stmt) >= 1);
  parm = gimple_call_arg (call_stmt, 0);
  gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
  /* Build a fresh SSA temporary holding parm + delta and insert the
     addition immediately before the call.  */
  tmp = create_tmp_var (TREE_TYPE (parm), NULL);
  add_referenced_var (tmp);
  tmp = make_ssa_name (tmp, NULL);
  new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
  SSA_NAME_DEF_STMT (tmp) = new_stmt;
  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
  /* Redirect the call to use the adjusted pointer.  */
  gimple_call_set_arg (call_stmt, 0, tmp);
/* Fold a call statement to OBJ_TYPE_REF to a direct call, if possible.  GSI
   determines the statement, generating new statements is allowed only if
   INPLACE is false.  Return true iff the statement was changed. */
gimple_fold_obj_type_ref_call (gimple_stmt_iterator *gsi)
  gimple stmt = gsi_stmt (*gsi);
  tree ref = gimple_call_fn (stmt);
  tree obj = OBJ_TYPE_REF_OBJECT (ref);
  tree binfo, fndecl, delta;
  HOST_WIDE_INT token;
  /* We can only devirtualize when the static type of the object is
     known, i.e. the object operand is &object of RECORD_TYPE.  */
  if (TREE_CODE (obj) != ADDR_EXPR)
  obj = TREE_OPERAND (obj, 0);
      || TREE_CODE (TREE_TYPE (obj)) != RECORD_TYPE)
  binfo = TYPE_BINFO (TREE_TYPE (obj));
  /* Look up the target method by its vtable token.  */
  token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
  fndecl = gimple_get_virt_mehtod_for_binfo (token, binfo, &delta, false);
  /* No this-adjustment is expected on this path.  */
  gcc_assert (integer_zerop (delta));
  gimple_call_set_fndecl (stmt, fndecl);
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded. */
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
  gimple stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (!inplace && callee && DECL_BUILT_IN (callee))
      tree result = gimple_fold_builtin (stmt);
          /* Prefer the cheap in-place replacement; fall back to
             re-gimplifying the folded tree when that fails.  */
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
  /* ??? Should perhaps do this in fold proper.  However, doing it
     there requires that we create a new CALL_EXPR, and that requires
     copying EH region info to the new node.  Easier to just do it
     here where we can just smash the call operand.  */
  callee = gimple_call_fn (stmt);
  if (TREE_CODE (callee) == OBJ_TYPE_REF)
    return gimple_fold_obj_type_ref_call (gsi);
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
        unsigned old_num_ops = gimple_num_ops (stmt);
        tree new_rhs = fold_gimple_assign (gsi);
        tree lhs = gimple_assign_lhs (stmt);
            && !useless_type_conversion_p (TREE_TYPE (lhs),
                                           TREE_TYPE (new_rhs)))
          new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
        /* Only install the new RHS when it does not need more operand
           slots than the statement currently has.  */
            || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
      changed |= fold_gimple_cond (stmt);
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
        if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
            tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
              gimple_call_set_arg (stmt, i, tmp);
      changed |= gimple_fold_call (gsi, inplace);
      /* Fold *& in asm operands (outputs as lvalues, inputs as rvalues).  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
          tree link = gimple_asm_output_op (stmt, i);
          tree op = TREE_VALUE (link);
          if (REFERENCE_CLASS_P (op)
              && (op = maybe_fold_reference (op, true)) != NULL_TREE)
              TREE_VALUE (link) = op;
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
          tree link = gimple_asm_input_op (stmt, i);
          tree op = TREE_VALUE (link);
          if (REFERENCE_CLASS_P (op)
              && (op = maybe_fold_reference (op, false)) != NULL_TREE)
              TREE_VALUE (link) = op;
      /* Fold *& in the value of a debug bind, if any.  */
      if (gimple_debug_bind_p (stmt))
          tree val = gimple_debug_bind_get_value (stmt);
              && REFERENCE_CLASS_P (val))
              tree tem = maybe_fold_reference (val, false);
                gimple_debug_bind_set_value (stmt, tem);
  /* The statement may have been replaced above; re-fetch it.  */
  stmt = gsi_stmt (*gsi);
  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
          tree new_lhs = maybe_fold_reference (lhs, true);
            gimple_set_lhs (stmt, new_lhs);
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */
fold_stmt (gimple_stmt_iterator *gsi)
  /* Full folding: replacing the statement is permitted (INPLACE = false).  */
  return fold_stmt_1 (gsi, false);
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement STMT should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */
fold_stmt_inplace (gimple stmt)
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  bool changed = fold_stmt_1 (&gsi, true);
  /* INPLACE folding must not have replaced the statement.  */
  gcc_assert (gsi_stmt (gsi) == stmt);
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */
canonicalize_bool (tree expr, bool invert)
      /* Inverted case: constants flip, an SSA name becomes (name == 0),
         and a comparison gets its code inverted.  */
      if (integer_nonzerop (expr))
        return boolean_false_node;
      else if (integer_zerop (expr))
        return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (EQ_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      /* Non-inverted case: an already-boolean expression passes through
         unchanged (handled in elided text).  */
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
      if (integer_nonzerop (expr))
        return boolean_true_node;
      else if (integer_zerop (expr))
        return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (NE_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (TREE_CODE (expr),
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
same_bool_comparison_p (const_tree expr, enum tree_code code,
                        const_tree op1, const_tree op2)
  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
      /* A boolean name equals (name != 0) and also (name == nonzero).  */
      if (operand_equal_p (expr, op1, 0))
        return ((code == NE_EXPR && integer_zerop (op2))
                || (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
          && gimple_assign_rhs_code (s) == code
          && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
          && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
          enum tree_code c = gimple_assign_rhs_code (s);
          /* (cmp != 0) and (cmp == nonzero) keep the comparison's sense;
             (cmp == 0) and (cmp != nonzero) invert it.  */
          if ((c == NE_EXPR && integer_zerop (op2))
              || (c == EQ_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr, c,
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
          if ((c == EQ_EXPR && integer_zerop (op2))
              || (c == NE_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr,
                                           invert_tree_comparison (c, false),
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
/* Check to see if two boolean expressions OP1 and OP2 are logically
same_bool_result_p (const_tree op1, const_tree op2)
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
                                 TREE_OPERAND (op2, 0),
                                 TREE_OPERAND (op2, 1)))
  /* Symmetric check with the roles of OP1 and OP2 swapped.  */
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
                                 TREE_OPERAND (op1, 0),
                                 TREE_OPERAND (op1, 1)))
1796 /* Forward declarations for some mutually recursive functions. */
1799 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1800 enum tree_code code2, tree op2a, tree op2b);
1802 and_var_with_comparison (tree var, bool invert,
1803 enum tree_code code2, tree op2a, tree op2b);
1805 and_var_with_comparison_1 (gimple stmt,
1806 enum tree_code code2, tree op2a, tree op2b);
1808 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1809 enum tree_code code2, tree op2a, tree op2b);
1811 or_var_with_comparison (tree var, bool invert,
1812 enum tree_code code2, tree op2a, tree op2b);
1814 or_var_with_comparison_1 (gimple stmt,
1815 enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b)
  gimple stmt = SSA_NAME_DEF_STMT (var);
  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
    t = or_var_with_comparison_1 (stmt,
                                  invert_tree_comparison (code2, false),
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  /* Re-apply the pending inversion (if any) and normalize to bool.  */
  return canonicalize_bool (t, invert);
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b)
  tree var = gimple_assign_lhs (stmt);
  /* TRUE_TEST_VAR / FALSE_TEST_VAR record an SSA name X when the second
     comparison is equivalent to testing X, resp. !X.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);
  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
          true_test_var = op2a;
          if (var == true_test_var)
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_false_node;
  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
      tree t = and_comparisons_1 (innercode,
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt),
  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (innercode == TRUTH_AND_EXPR
      || innercode == TRUTH_OR_EXPR
      || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
          && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      tree partial = NULL_TREE;
      bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);
      /* Check for boolean identities that don't require recursive examination
         inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
         inner1 AND (inner1 OR inner2) => inner1
         !inner1 AND (inner1 AND inner2) => false
         !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
          ? boolean_false_node
          : and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
          ? boolean_false_node
          : and_var_with_comparison (inner1, false, code2, op2a, op2b));
      /* Next, redistribute/reassociate the AND across the inner tests.
         Compute the first partial result, (inner1 AND (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
              if (integer_onep (t))
              else if (integer_zerop (t))
                return boolean_false_node;
          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner2 AND (op2a code2 op2b))) */
          else if (integer_onep (t))
            return boolean_true_node;
          /* Save partial result for later.  */
      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (inner1 AND t) */
              if (integer_onep (t))
              else if (integer_zerop (t))
                return boolean_false_node;
              /* If both are the same, we can apply the identity
              else if (partial && same_bool_result_p (t, partial))
          /* Handle the OR case. where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner1 AND (op2a code2 op2b)))
             => (t OR partial) */
              if (integer_onep (t))
                return boolean_true_node;
              /* We already got a simplification for the other
                 operand to the redistributed OR expression.  The
                 interesting case is when at least one is false.
                 Or, if both are the same, we can apply the identity
              if (integer_zerop (partial))
              else if (integer_zerop (t))
              else if (same_bool_result_p (t, partial))
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b)
  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1, code2,
                                    boolean_type_node, op1a, op1b);
  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    boolean_type_node, op1a, op1b);
  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
      /* CMP orders the two constant bounds: sign of op1b - op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);
      /* If we have (op1a == op1b), we should either be able to
         return that or FALSE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
              return fold_build2 (code1, boolean_type_node, op1a, op1b);
              return boolean_false_node;
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
              return fold_build2 (code2, boolean_type_node, op2a, op2b);
              return boolean_false_node;
      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
            case EQ_EXPR: val = (cmp != 0); break;
            case NE_EXPR: val = (cmp == 0); break;
            case LT_EXPR: val = (cmp >= 0); break;
            case GT_EXPR: val = (cmp <= 0); break;
            case LE_EXPR: val = (cmp > 0); break;
            case GE_EXPR: val = (cmp < 0); break;
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
      else if (code2 == NE_EXPR)
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp <= 0); break;
            case GT_EXPR: val = (cmp >= 0); break;
            case LE_EXPR: val = (cmp < 0); break;
            case GE_EXPR: val = (cmp > 0); break;
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
      /* Check for singleton ranges: (x <= c AND x >= c) => x == c.  */
          && ((code1 == LE_EXPR && code2 == GE_EXPR)
              || (code1 == GE_EXPR && code2 == LE_EXPR)))
        return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
      /* Check for disjoint ranges.  */
          && (code1 == LT_EXPR || code1 == LE_EXPR)
          && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_false_node;
          && (code1 == GT_EXPR || code1 == GE_EXPR)
          && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_false_node;
  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
          /* Try to simplify by copy-propagating the definition.  */
          return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
          /* If every argument to the PHI produces the same result when
             ANDed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
              tree result = NULL_TREE;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                  tree arg = gimple_phi_arg_def (stmt, i);
                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                  if (arg == gimple_phi_result (stmt))
                  else if (TREE_CODE (arg) == INTEGER_CST)
                      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
                          result = boolean_false_node;
                        else if (!integer_zerop (result))
                          result = fold_build2 (code2, boolean_type_node,
                        else if (!same_bool_comparison_p (result,
                  else if (TREE_CODE (arg) == SSA_NAME)
                      tree temp = and_var_with_comparison (arg, invert,
                      else if (!same_bool_result_p (result, temp))
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
                            enum tree_code code2, tree op2a, tree op2b)
  /* and_comparisons_1 only recurses on its first operand, so try both
     orderings of the two comparisons.  */
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
/* Helper function for or_comparisons_1: try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_EXPR if we can't simplify this to a single expression.  */
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b)
  gimple stmt = SSA_NAME_DEF_STMT (var);
  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
    t = and_var_with_comparison_1 (stmt,
                                   invert_tree_comparison (code2, false),
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  /* Re-apply the pending inversion (if any) and normalize to bool.  */
  return canonicalize_bool (t, invert);
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */
or_var_with_comparison_1 (gimple stmt,
                          enum tree_code code2, tree op2a, tree op2b)
  tree var = gimple_assign_lhs (stmt);
  /* TRUE_TEST_VAR / FALSE_TEST_VAR record an SSA name X when the second
     comparison is equivalent to testing X, resp. !X.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);
  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
          true_test_var = op2a;
          if (var == true_test_var)
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_true_node;
  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
      tree t = or_comparisons_1 (innercode,
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (innercode == TRUTH_AND_EXPR
      || innercode == TRUTH_OR_EXPR
      || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
          && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      tree partial = NULL_TREE;
      bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
      /* Check for boolean identities that don't require recursive examination
         inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
         inner1 OR (inner1 AND inner2) => inner1
         !inner1 OR (inner1 OR inner2) => true
         !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      if (inner1 == true_test_var)
        return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
          : or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
          : or_var_with_comparison (inner1, false, code2, op2a, op2b));
      /* Next, redistribute/reassociate the OR across the inner tests.
         Compute the first partial result, (inner1 OR (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
              if (integer_onep (t))
                return boolean_true_node;
              else if (integer_zerop (t))
          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner2 OR (op2a code op2b))) */
          else if (integer_zerop (t))
            return boolean_false_node;
          /* Save partial result for later.  */
      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (t OR partial) */
              if (integer_zerop (t))
              else if (integer_onep (t))
                return boolean_true_node;
              /* If both are the same, we can apply the identity
              else if (partial && same_bool_result_p (t, partial))
          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner1 OR (op2a code2 op2b)))
             => (t AND partial) */
              if (integer_zerop (t))
                return boolean_false_node;
              /* We already got a simplification for the other
                 operand to the redistributed AND expression.  The
                 interesting case is when at least one is true.
                 Or, if both are the same, we can apply the identity
              if (integer_onep (partial))
              else if (integer_onep (t))
              else if (same_bool_result_p (t, partial))
2460 /* Try to simplify the OR of two comparisons defined by
2461 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2462 If this can be done without constructing an intermediate value,
2463 return the resulting tree; otherwise NULL_TREE is returned.
2464 This function is deliberately asymmetric as it recurses on SSA_DEFs
2465 in the first comparison but not the second. */
2468 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2469 enum tree_code code2, tree op2a, tree op2b)
2471 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2472 if (operand_equal_p (op1a, op2a, 0)
2473 && operand_equal_p (op1b, op2b, 0))
/* Identical operands on both sides: merge the two predicate codes
   into one comparison via the TRUTH_ORIF lattice. */
2475 tree t = combine_comparisons (UNKNOWN_LOCATION,
2476 TRUTH_ORIF_EXPR, code1, code2,
2477 boolean_type_node, op1a, op1b);
2482 /* Likewise the swapped case of the above. */
2483 if (operand_equal_p (op1a, op2b, 0)
2484 && operand_equal_p (op1b, op2a, 0))
/* Same as above after swapping the second comparison's operands,
   which requires mirroring its code with swap_tree_comparison. */
2486 tree t = combine_comparisons (UNKNOWN_LOCATION,
2487 TRUTH_ORIF_EXPR, code1,
2488 swap_tree_comparison (code2),
2489 boolean_type_node, op1a, op1b);
2494 /* If both comparisons are of the same value against constants, we might
2495 be able to merge them. */
2496 if (operand_equal_p (op1a, op2a, 0)
2497 && TREE_CODE (op1b) == INTEGER_CST
2498 && TREE_CODE (op2b) == INTEGER_CST)
/* cmp orders the two constants: <0, 0, >0 for op1b <, ==, > op2b.
   Each switch below uses it to decide whether the constant OP1B
   (resp. OP2B) satisfies the other comparison. */
2500 int cmp = tree_int_cst_compare (op1b, op2b);
2502 /* If we have (op1a != op1b), we should either be able to
2503 return that or TRUE, depending on whether the constant op1b
2504 also satisfies the other comparison against op2b. */
2505 if (code1 == NE_EXPR)
/* val == true means op1b satisfies (x code2 op2b), so the union of
   the two ranges is everything -> TRUE; otherwise the second range
   is contained in the != range -> keep the != comparison. */
2511 case EQ_EXPR: val = (cmp == 0); break;
2512 case NE_EXPR: val = (cmp != 0); break;
2513 case LT_EXPR: val = (cmp < 0); break;
2514 case GT_EXPR: val = (cmp > 0); break;
2515 case LE_EXPR: val = (cmp <= 0); break;
2516 case GE_EXPR: val = (cmp >= 0); break;
2517 default: done = false;
2522 return boolean_true_node;
2524 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2527 /* Likewise if the second comparison is a != comparison. */
2528 else if (code2 == NE_EXPR)
/* Mirror of the case above with the roles of the two comparisons
   exchanged, hence the reversed inequality tests on cmp. */
2534 case EQ_EXPR: val = (cmp == 0); break;
2535 case NE_EXPR: val = (cmp != 0); break;
2536 case LT_EXPR: val = (cmp > 0); break;
2537 case GT_EXPR: val = (cmp < 0); break;
2538 case LE_EXPR: val = (cmp >= 0); break;
2539 case GE_EXPR: val = (cmp <= 0); break;
2540 default: done = false;
2545 return boolean_true_node;
2547 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2551 /* See if an equality test is redundant with the other comparison. */
2552 else if (code1 == EQ_EXPR)
/* If the singleton {op1b} already satisfies (x code2 op2b), the
   equality adds nothing to the union -> the second comparison alone
   is the result. */
2557 case EQ_EXPR: val = (cmp == 0); break;
2558 case NE_EXPR: val = (cmp != 0); break;
2559 case LT_EXPR: val = (cmp < 0); break;
2560 case GT_EXPR: val = (cmp > 0); break;
2561 case LE_EXPR: val = (cmp <= 0); break;
2562 case GE_EXPR: val = (cmp >= 0); break;
2567 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2569 else if (code2 == EQ_EXPR)
/* Symmetric case: {op2b} absorbed by the first comparison. */
2574 case EQ_EXPR: val = (cmp == 0); break;
2575 case NE_EXPR: val = (cmp != 0); break;
2576 case LT_EXPR: val = (cmp > 0); break;
2577 case GT_EXPR: val = (cmp < 0); break;
2578 case LE_EXPR: val = (cmp >= 0); break;
2579 case GE_EXPR: val = (cmp <= 0); break;
2584 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2587 /* Choose the less restrictive of two < or <= comparisons. */
2588 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2589 && (code2 == LT_EXPR || code2 == LE_EXPR))
2591 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2592 return fold_build2 (code2, boolean_type_node, op2a, op2b)
2594 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2597 /* Likewise choose the less restrictive of two > or >= comparisons. */
2598 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2599 && (code2 == GT_EXPR || code2 == GE_EXPR))
2601 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2602 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2604 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2607 /* Check for singleton ranges. */
/* NOTE(review): the guard on this else-if (presumably cmp == 0, i.e.
   equal constants) is on an elided line -- confirm against the full
   source before relying on this comment. */
2609 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2610 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2611 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2613 /* Check for less/greater pairs that don't restrict the range at all. */
2615 && (code1 == LT_EXPR || code1 == LE_EXPR)
2616 && (code2 == GT_EXPR || code2 == GE_EXPR))
2617 return boolean_true_node;
2619 && (code1 == GT_EXPR || code1 == GE_EXPR)
2620 && (code2 == LT_EXPR || code2 == LE_EXPR))
2621 return boolean_true_node;
2624 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2625 NAME's definition is a truth value. See if there are any simplifications
2626 that can be done against the NAME's definition. */
2627 if (TREE_CODE (op1a) == SSA_NAME
2628 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2629 && (integer_zerop (op1b) || integer_onep (op1b)))
/* invert is true when the first comparison tests NAME for falsehood
   (NAME == 0 or NAME != 1); it is threaded through the recursive
   or_var_with_comparison calls below. */
2631 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2632 || (code1 == NE_EXPR && integer_onep (op1b)));
2633 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2634 switch (gimple_code (stmt))
2637 /* Try to simplify by copy-propagating the definition. */
2638 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2641 /* If every argument to the PHI produces the same result when
2642 ORed with the second comparison, we win.
2643 Do not do this unless the type is bool since we need a bool
2644 result here anyway. */
2645 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2647 tree result = NULL_TREE;
2649 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2651 tree arg = gimple_phi_arg_def (stmt, i);
2653 /* If this PHI has itself as an argument, ignore it.
2654 If all the other args produce the same result,
2656 if (arg == gimple_phi_result (stmt))
/* Constant PHI argument: a true value (after accounting for invert)
   makes the OR trivially true for this edge; otherwise the arg
   contributes just the second comparison. */
2658 else if (TREE_CODE (arg) == INTEGER_CST)
2660 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2663 result = boolean_true_node;
2664 else if (!integer_onep (result))
2668 result = fold_build2 (code2, boolean_type_node,
2670 else if (!same_bool_comparison_p (result,
/* SSA-name argument: recurse, and require every edge to agree on
   the same simplified result. */
2674 else if (TREE_CODE (arg) == SSA_NAME)
2676 tree temp = or_var_with_comparison (arg, invert,
2682 else if (!same_bool_result_p (result, temp))
2698 /* Try to simplify the OR of two comparisons, specified by
2699 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2700 If this can be simplified to a single expression (without requiring
2701 introducing more SSA variables to hold intermediate values),
2702 return the resulting tree. Otherwise return NULL_TREE.
2703 If the result expression is non-null, it has boolean type. */
2706 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2707 enum tree_code code2, tree op2a, tree op2b)
2709 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2713 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);