1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
34 /* Return true when DECL can be referenced from current unit.
35 We can get declarations that are not possible to reference for
38 1) When analyzing C++ virtual tables.
39 C++ virtual tables do have known constructors even
40 when they are keyed to other compilation unit.
41 Those tables can contain pointers to methods and vars
42 in other units. Those methods have both STATIC and EXTERNAL
44 2) In WHOPR mode devirtualization might lead to reference
45 to method that was partitioned elsewhere.
46 In this case we have static VAR_DECL or FUNCTION_DECL
47 that has no corresponding callgraph/varpool node
49 3) COMDAT functions referred by external vtables that
50 we devirtualize only during final compilation stage.
51 At this time we already decided that we will not output
52 the function body and thus we can't reference the symbol
/* Return true if DECL may be referenced from the unit currently being
   compiled; see the comment block above for the concrete situations
   (virtual tables, WHOPR partitioning, COMDAT devirtualization) where
   this must answer false.  */
56 can_refer_decl_in_current_unit_p (tree decl)
58 struct varpool_node *vnode;
59 struct cgraph_node *node;
/* A symbol that is neither static nor external is local to this unit
   and therefore always referencable.  */
61 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
63 /* External flag is set, so we deal with C++ reference
64 to static object from other file. */
65 if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
66 && TREE_CODE (decl) == VAR_DECL)
68 /* Just be sure it is not big in frontend setting
69 flags incorrectly. Those variables should never
71 gcc_checking_assert (!(vnode = varpool_get_node (decl))
72 || !vnode->finalized);
75 /* When function is public, we always can introduce new reference.
76 Exception are the COMDAT functions where introducing a direct
77 reference imply need to include function body in the current unit. */
78 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
80 /* We are not at ltrans stage; so don't worry about WHOPR.
81 Also when still gimplifying all referred comdat functions will be
83 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
85 /* If we already output the function body, we are safe. */
86 if (TREE_ASM_WRITTEN (decl))
88 if (TREE_CODE (decl) == FUNCTION_DECL)
90 node = cgraph_get_node (decl);
91 /* Check that we still have function body and that we didn't take
92 the decision to eliminate offline copy of the function yet.
93 The second is important when devirtualization happens during final
94 compilation stage when making a new reference no longer makes callee
96 if (!node || !node->analyzed || node->global.inlined_to)
99 else if (TREE_CODE (decl) == VAR_DECL)
/* For variables, a finalized varpool node must exist for a reference
   to be valid.  */
101 vnode = varpool_get_node (decl);
102 if (!vnode || !vnode->finalized)
108 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
109 acceptable form for is_gimple_min_invariant. */
/* See the comment above: canonicalize CVAL (taken from DECL_INITIAL)
   into a form acceptable to is_gimple_min_invariant.  */
112 canonicalize_constructor_val (tree cval)
/* &base p+ offset: try to collapse into a plain address expression.  */
115 if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
117 tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
118 TREE_OPERAND (cval, 0),
119 TREE_OPERAND (cval, 1),
124 if (TREE_CODE (cval) == ADDR_EXPR)
126 tree base = get_base_address (TREE_OPERAND (cval, 0));
/* NOTE(review): the guard here rejects addresses of decls that cannot
   be referenced from this unit -- the corresponding failure return is
   not visible in this view; confirm against the full source.  */
129 && (TREE_CODE (base) == VAR_DECL
130 || TREE_CODE (base) == FUNCTION_DECL)
131 && !can_refer_decl_in_current_unit_p (base))
133 if (base && TREE_CODE (base) == VAR_DECL)
134 add_referenced_var (base);
135 /* We never have the chance to fixup types in global initializers
136 during gimplification. Do so here. */
137 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
138 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
143 /* If SYM is a constant variable with known value, return the value.
144 NULL_TREE is returned otherwise. */
/* If SYM is a constant variable whose value is known, return that value
   (canonicalized); otherwise NULL_TREE -- see comment above.  */
147 get_symbol_constant_value (tree sym)
149 if (const_value_known_p (sym))
151 tree val = DECL_INITIAL (sym);
/* Put the initializer into is_gimple_min_invariant shape before using it.  */
154 val = canonicalize_constructor_val (val);
155 if (val && is_gimple_min_invariant (val))
160 /* Variables declared 'const' without an initializer
161 have zero as the initializer if they may not be
162 overridden at link or run time. */
164 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
165 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
166 return build_zero_cst (TREE_TYPE (sym));
173 /* Return true if we may propagate the address expression ADDR into the
174 dereference DEREF and cancel them. */
/* Return true if ADDR (an ADDR_EXPR) may be propagated into the
   MEM_REF DEREF so that the pair cancels out.  */
177 may_propagate_address_into_dereference (tree addr, tree deref)
179 gcc_assert (TREE_CODE (deref) == MEM_REF
180 && TREE_CODE (addr) == ADDR_EXPR);
182 /* Don't propagate if ADDR's operand has incomplete type. */
183 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
186 /* If the address is invariant then we do not need to preserve restrict
187 qualifications. But we do need to preserve volatile qualifiers until
188 we can annotate the folded dereference itself properly. */
189 if (is_gimple_min_invariant (addr)
190 && (!TREE_THIS_VOLATILE (deref)
191 || TYPE_VOLATILE (TREE_TYPE (addr))))
192 return useless_type_conversion_p (TREE_TYPE (deref),
193 TREE_TYPE (TREE_OPERAND (addr, 0)));
195 /* Else both the address substitution and the folding must result in
196 a valid useless type conversion sequence. */
197 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
199 && useless_type_conversion_p (TREE_TYPE (deref),
200 TREE_TYPE (TREE_OPERAND (addr, 0))));
204 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
205 BASE is an array type. OFFSET is a byte displacement.
207 LOC is the location of the original expression. */
/* Attempt to fold *(BASE+OFFSET) into BASE[index]: BASE has array type,
   OFFSET is a byte displacement, LOC is used for the new expression's
   location.  Returns the new ARRAY_REF on success (failure paths are
   not visible in this view).  */
210 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
212 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
213 tree array_type, elt_type, elt_size;
216 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
217 measured in units of the size of elements type) from that ARRAY_REF).
218 We can't do anything if either is variable.
220 The case we handle here is *(&A[N]+O). */
221 if (TREE_CODE (base) == ARRAY_REF)
223 tree low_bound = array_ref_low_bound (base);
225 elt_offset = TREE_OPERAND (base, 1);
226 if (TREE_CODE (low_bound) != INTEGER_CST
227 || TREE_CODE (elt_offset) != INTEGER_CST)
/* Normalize the pre-existing index to be zero-based.  */
230 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
231 base = TREE_OPERAND (base, 0);
234 /* Ignore stupid user tricks of indexing non-array variables. */
235 array_type = TREE_TYPE (base);
236 if (TREE_CODE (array_type) != ARRAY_TYPE)
238 elt_type = TREE_TYPE (array_type);
240 /* Use signed size type for intermediate computation on the index. */
241 idx_type = ssizetype;
243 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
244 element type (so we can use the alignment if it's not constant).
245 Otherwise, compute the offset as an index by using a division. If the
246 division isn't exact, then don't do anything. */
247 elt_size = TYPE_SIZE_UNIT (elt_type);
250 if (integer_zerop (offset))
252 if (TREE_CODE (elt_size) != INTEGER_CST)
253 elt_size = size_int (TYPE_ALIGN (elt_type));
255 idx = build_int_cst (idx_type, 0);
259 unsigned HOST_WIDE_INT lquo, lrem;
260 HOST_WIDE_INT hquo, hrem;
263 /* The final array offset should be signed, so we need
264 to sign-extend the (possibly pointer) offset here
265 and use signed division. */
266 soffset = double_int_sext (tree_to_double_int (offset),
267 TYPE_PRECISION (TREE_TYPE (offset)));
268 if (TREE_CODE (elt_size) != INTEGER_CST
269 || div_and_round_double (TRUNC_DIV_EXPR, 0,
270 soffset.low, soffset.high,
271 TREE_INT_CST_LOW (elt_size),
272 TREE_INT_CST_HIGH (elt_size),
273 &lquo, &hquo, &lrem, &hrem)
277 idx = build_int_cst_wide (idx_type, lquo, hquo);
280 /* Assume the low bound is zero. If there is a domain type, get the
281 low bound, if any, convert the index into that type, and add the
283 min_idx = build_int_cst (idx_type, 0);
284 domain_type = TYPE_DOMAIN (array_type);
287 idx_type = domain_type;
288 if (TYPE_MIN_VALUE (idx_type))
289 min_idx = TYPE_MIN_VALUE (idx_type);
291 min_idx = fold_convert (idx_type, min_idx);
293 if (TREE_CODE (min_idx) != INTEGER_CST)
296 elt_offset = fold_convert (idx_type, elt_offset);
299 if (!integer_zerop (min_idx))
300 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
301 if (!integer_zerop (elt_offset))
302 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
304 /* Make sure to possibly truncate late after offsetting. */
305 idx = fold_convert (idx_type, idx);
307 /* We don't want to construct access past array bounds. For example
310 should not be simplified into (*c)[14] or tree-vrp will
312 This is only an issue for multi-dimensional arrays. */
/* Reject indices outside the domain's [min, max] bounds, and any
   negative index when no domain bounds are available.  */
313 if (TREE_CODE (elt_type) == ARRAY_TYPE
316 if (TYPE_MAX_VALUE (domain_type)
317 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
318 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
320 else if (TYPE_MIN_VALUE (domain_type)
321 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
322 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
324 else if (compare_tree_int (idx, 0) < 0)
329 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
330 SET_EXPR_LOCATION (t, loc);
336 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
337 LOC is the location of original expression.
339 Before attempting the conversion strip off existing ADDR_EXPRs. */
/* Try to express (ORIG_TYPE)BASE+OFFSET as BASE[index]; BASE must be
   an ADDR_EXPR, which is stripped before the attempt (see comment
   above).  */
342 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
348 if (TREE_CODE (base) != ADDR_EXPR)
351 base = TREE_OPERAND (base, 0);
/* Zero offset to a compatibly-typed object needs no array fold at all.  */
352 if (types_compatible_p (orig_type, TREE_TYPE (base))
353 && integer_zerop (offset))
356 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
/* Only accept the folded reference if its type matches ORIG_TYPE.  */
357 if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
362 /* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
363 LOC is the location of the original expression. */
/* Try to express (ORIG_TYPE)ADDR+OFFSET as &(*ADDR)[index]
   (see comment above).  */
366 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
372 if (TREE_CODE (addr) != ADDR_EXPR)
374 base = TREE_OPERAND (addr, 0);
375 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
/* Re-take the address of the folded array reference and check the
   resulting pointer type is directly usable for ORIG_TYPE.  */
378 ret = build_fold_addr_expr (ret);
379 if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
381 SET_EXPR_LOCATION (ret, loc);
388 /* A quaint feature extant in our address arithmetic is that there
389 can be hidden type changes here. The type of the result need
390 not be the same as the type of the input pointer.
392 What we're after here is an expression of the form
393 (T *)(&array + const)
394 where array is OP0, const is OP1, RES_TYPE is T and
395 the cast doesn't actually exist, but is implicit in the
396 type of the POINTER_PLUS_EXPR. We'd like to turn this into
398 which may be able to propagate further. */
/* Fold the POINTER_PLUS_EXPR OP0 p+ OP1 with result type RES_TYPE into
   an address of an array element where possible -- see the comment
   above for the (T *)(&array + const) motivation.  */
401 maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
406 /* The first operand should be an ADDR_EXPR. */
407 if (TREE_CODE (op0) != ADDR_EXPR)
409 op0 = TREE_OPERAND (op0, 0);
411 /* It had better be a constant. */
412 if (TREE_CODE (op1) != INTEGER_CST)
414 /* Or op0 should now be A[0] and the non-constant offset defined
415 via a multiplication by the array element size. */
416 if (TREE_CODE (op0) == ARRAY_REF
417 /* As we will end up creating a variable index array access
418 in the outermost array dimension make sure there isn't
419 a more inner array that the index could overflow to. */
420 && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
421 && integer_zerop (TREE_OPERAND (op0, 1))
422 && TREE_CODE (op1) == SSA_NAME)
424 gimple offset_def = SSA_NAME_DEF_STMT (op1);
425 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
426 if (!host_integerp (elsz, 1)
427 || !is_gimple_assign (offset_def))
430 /* Do not build array references of something that we can't
431 see the true number of array dimensions for. */
432 if (!DECL_P (TREE_OPERAND (op0, 0))
433 && !handled_component_p (TREE_OPERAND (op0, 0)))
/* &A[0] p+ (i * elsz) -> &A[i]; with byte-sized elements any non-MULT
   offset definition serves directly as the index.  */
436 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
437 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
438 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
439 return build_fold_addr_expr
440 (build4 (ARRAY_REF, TREE_TYPE (op0),
441 TREE_OPERAND (op0, 0),
442 gimple_assign_rhs1 (offset_def),
443 TREE_OPERAND (op0, 2),
444 TREE_OPERAND (op0, 3)));
445 else if (integer_onep (elsz)
446 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
447 return build_fold_addr_expr
448 (build4 (ARRAY_REF, TREE_TYPE (op0),
449 TREE_OPERAND (op0, 0),
451 TREE_OPERAND (op0, 2),
452 TREE_OPERAND (op0, 3)));
/* Same transform when OP0 is the array object itself (one-dimensional
   only; nested array element types are excluded).  */
454 else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
456 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
457 && TREE_CODE (op1) == SSA_NAME)
459 gimple offset_def = SSA_NAME_DEF_STMT (op1);
460 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
461 if (!host_integerp (elsz, 1)
462 || !is_gimple_assign (offset_def))
465 /* Do not build array references of something that we can't
466 see the true number of array dimensions for. */
468 && !handled_component_p (op0))
471 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
472 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
473 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
474 return build_fold_addr_expr
475 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
476 op0, gimple_assign_rhs1 (offset_def),
477 integer_zero_node, NULL_TREE));
478 else if (integer_onep (elsz)
479 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
480 return build_fold_addr_expr
481 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
483 integer_zero_node, NULL_TREE));
489 /* If the first operand is an ARRAY_REF, expand it so that we can fold
490 the offset into it. */
491 while (TREE_CODE (op0) == ARRAY_REF)
493 tree array_obj = TREE_OPERAND (op0, 0);
494 tree array_idx = TREE_OPERAND (op0, 1);
495 tree elt_type = TREE_TYPE (op0);
496 tree elt_size = TYPE_SIZE_UNIT (elt_type);
499 if (TREE_CODE (array_idx) != INTEGER_CST)
501 if (TREE_CODE (elt_size) != INTEGER_CST)
504 /* Un-bias the index by the min index of the array type. */
505 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
508 min_idx = TYPE_MIN_VALUE (min_idx);
511 if (TREE_CODE (min_idx) != INTEGER_CST)
514 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
515 if (!integer_zerop (min_idx))
516 array_idx = int_const_binop (MINUS_EXPR, array_idx,
521 /* Convert the index to a byte offset. */
522 array_idx = fold_convert (sizetype, array_idx);
523 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
525 /* Update the operands for the next round, or for folding. */
526 op1 = int_const_binop (PLUS_EXPR,
531 ptd_type = TREE_TYPE (res_type);
532 /* If we want a pointer to void, reconstruct the reference from the
533 array element type. A pointer to that can be trivially converted
534 to void *. This happens as we fold (void *)(ptr p+ off). */
535 if (VOID_TYPE_P (ptd_type)
536 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
537 ptd_type = TREE_TYPE (TREE_TYPE (op0));
539 /* At which point we can try some of the same things as for indirects. */
540 t = maybe_fold_offset_to_array_ref (loc, op0, op1)
543 t = build_fold_addr_expr (t);
544 if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
546 SET_EXPR_LOCATION (t, loc);
552 /* Subroutine of fold_stmt. We perform several simplifications of the
553 memory reference tree EXPR and make sure to re-gimplify them properly
554 after propagation of constant addresses. IS_LHS is true if the
555 reference is supposed to be an lvalue. */
/* Simplify the memory reference EXPR (see comment above); IS_LHS is
   true when EXPR is used as an lvalue, which forbids folding it to a
   constant value.  Recurses after each successful local rewrite.  */
558 maybe_fold_reference (tree expr, bool is_lhs)
/* An rhs reference into a constant aggregate may fold to the value.  */
564 && (result = fold_const_aggregate_ref (expr))
565 && is_gimple_min_invariant (result))
568 /* ??? We might want to open-code the relevant remaining cases
569 to avoid using the generic fold. */
570 if (handled_component_p (*t)
571 && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
573 tree tem = fold (*t);
/* Walk down to the innermost base of the component reference chain.  */
578 while (handled_component_p (*t))
579 t = &TREE_OPERAND (*t, 0);
581 /* Fold back MEM_REFs to reference trees. */
582 if (TREE_CODE (*t) == MEM_REF
583 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
584 && integer_zerop (TREE_OPERAND (*t, 1))
585 && (TREE_THIS_VOLATILE (*t)
586 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
587 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
588 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
589 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
590 /* We have to look out here to not drop a required conversion
591 from the rhs to the lhs if is_lhs, but we don't have the
592 rhs here to verify that. Thus require strict type
594 && types_compatible_p (TREE_TYPE (*t),
595 TREE_TYPE (TREE_OPERAND
596 (TREE_OPERAND (*t, 0), 0))))
599 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
600 tem = maybe_fold_reference (expr, is_lhs);
605 /* Canonicalize MEM_REFs invariant address operand. */
606 else if (TREE_CODE (*t) == MEM_REF
607 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
/* Preserve volatileness across the fold_binary rebuild.  */
609 bool volatile_p = TREE_THIS_VOLATILE (*t);
610 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
611 TREE_OPERAND (*t, 0),
612 TREE_OPERAND (*t, 1));
615 TREE_THIS_VOLATILE (tem) = volatile_p;
617 tem = maybe_fold_reference (expr, is_lhs);
623 else if (TREE_CODE (*t) == TARGET_MEM_REF)
625 tree tem = maybe_fold_tmr (*t);
629 tem = maybe_fold_reference (expr, is_lhs);
/* Finally, try replacing a constant symbol base with its value.  */
638 tree tem = get_symbol_constant_value (*t);
640 && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
642 *t = unshare_expr (tem);
643 tem = maybe_fold_reference (expr, is_lhs);
654 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
655 replacement rhs for the statement or NULL_TREE if no simplification
656 could be made. It is assumed that the operands have been previously
/* Attempt to fold the assignment at *SI; returns a replacement rhs or
   NULL_TREE (see comment above).  Dispatches on the rhs class of the
   assignment's rhs code.  */
660 fold_gimple_assign (gimple_stmt_iterator *si)
662 gimple stmt = gsi_stmt (*si);
663 enum tree_code subcode = gimple_assign_rhs_code (stmt);
664 location_t loc = gimple_location (stmt);
666 tree result = NULL_TREE;
668 switch (get_gimple_rhs_class (subcode))
670 case GIMPLE_SINGLE_RHS:
672 tree rhs = gimple_assign_rhs1 (stmt);
674 /* Try to fold a conditional expression. */
675 if (TREE_CODE (rhs) == COND_EXPR)
677 tree op0 = COND_EXPR_COND (rhs);
680 location_t cond_loc = EXPR_LOCATION (rhs);
682 if (COMPARISON_CLASS_P (op0))
/* Defer overflow warnings so they are only emitted when the fold
   actually produces a usable GIMPLE condition.  */
684 fold_defer_overflow_warnings ();
685 tem = fold_binary_loc (cond_loc,
686 TREE_CODE (op0), TREE_TYPE (op0),
687 TREE_OPERAND (op0, 0),
688 TREE_OPERAND (op0, 1));
689 /* This is actually a conditional expression, not a GIMPLE
690 conditional statement, however, the valid_gimple_rhs_p
691 test still applies. */
692 set = (tem && is_gimple_condexpr (tem)
693 && valid_gimple_rhs_p (tem));
694 fold_undefer_overflow_warnings (set, stmt, 0);
696 else if (is_gimple_min_invariant (op0))
705 result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
706 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
709 else if (REFERENCE_CLASS_P (rhs))
710 return maybe_fold_reference (rhs, false);
712 else if (TREE_CODE (rhs) == ADDR_EXPR)
714 tree ref = TREE_OPERAND (rhs, 0);
715 tree tem = maybe_fold_reference (ref, true);
/* &MEM[p, 0] folds to p converted to the address's type.  */
717 && TREE_CODE (tem) == MEM_REF
718 && integer_zerop (TREE_OPERAND (tem, 1)))
719 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
721 result = fold_convert (TREE_TYPE (rhs),
722 build_fold_addr_expr_loc (loc, tem));
723 else if (TREE_CODE (ref) == MEM_REF
724 && integer_zerop (TREE_OPERAND (ref, 1)))
725 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
728 else if (TREE_CODE (rhs) == CONSTRUCTOR
729 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
730 && (CONSTRUCTOR_NELTS (rhs)
731 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
733 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
/* All elements must be constants for the VECTOR_CST rewrite.  */
737 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
738 if (TREE_CODE (val) != INTEGER_CST
739 && TREE_CODE (val) != REAL_CST
740 && TREE_CODE (val) != FIXED_CST)
743 return build_vector_from_ctor (TREE_TYPE (rhs),
744 CONSTRUCTOR_ELTS (rhs));
747 else if (DECL_P (rhs))
748 return unshare_expr (get_symbol_constant_value (rhs));
750 /* If we couldn't fold the RHS, hand over to the generic
752 if (result == NULL_TREE)
755 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
756 that may have been added by fold, and "useless" type
757 conversions that might now be apparent due to propagation. */
758 STRIP_USELESS_TYPE_CONVERSION (result);
760 if (result != rhs && valid_gimple_rhs_p (result))
767 case GIMPLE_UNARY_RHS:
769 tree rhs = gimple_assign_rhs1 (stmt);
771 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
774 /* If the operation was a conversion do _not_ mark a
775 resulting constant with TREE_OVERFLOW if the original
776 constant was not. These conversions have implementation
777 defined behavior and retaining the TREE_OVERFLOW flag
778 here would confuse later passes such as VRP. */
779 if (CONVERT_EXPR_CODE_P (subcode)
780 && TREE_CODE (result) == INTEGER_CST
781 && TREE_CODE (rhs) == INTEGER_CST)
782 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
784 STRIP_USELESS_TYPE_CONVERSION (result);
785 if (valid_gimple_rhs_p (result))
/* Pointer-to-pointer conversions may still expose an address fold.  */
788 else if (CONVERT_EXPR_CODE_P (subcode)
789 && POINTER_TYPE_P (gimple_expr_type (stmt))
790 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
792 tree type = gimple_expr_type (stmt);
793 tree t = maybe_fold_offset_to_address (loc,
794 gimple_assign_rhs1 (stmt),
795 integer_zero_node, type);
802 case GIMPLE_BINARY_RHS:
803 /* Try to fold pointer addition. */
804 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
806 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
807 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
809 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
810 if (!useless_type_conversion_p
811 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
812 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
814 result = maybe_fold_stmt_addition (gimple_location (stmt),
816 gimple_assign_rhs1 (stmt),
817 gimple_assign_rhs2 (stmt));
821 result = fold_binary_loc (loc, subcode,
822 TREE_TYPE (gimple_assign_lhs (stmt)),
823 gimple_assign_rhs1 (stmt),
824 gimple_assign_rhs2 (stmt));
828 STRIP_USELESS_TYPE_CONVERSION (result);
829 if (valid_gimple_rhs_p (result))
832 /* Fold might have produced non-GIMPLE, so if we trust it blindly
833 we lose canonicalization opportunities. Do not go again
834 through fold here though, or the same non-GIMPLE will be
836 if (commutative_tree_code (subcode)
837 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
838 gimple_assign_rhs2 (stmt), false))
839 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
840 gimple_assign_rhs2 (stmt),
841 gimple_assign_rhs1 (stmt));
845 case GIMPLE_TERNARY_RHS:
846 result = fold_ternary_loc (loc, subcode,
847 TREE_TYPE (gimple_assign_lhs (stmt)),
848 gimple_assign_rhs1 (stmt),
849 gimple_assign_rhs2 (stmt),
850 gimple_assign_rhs3 (stmt));
854 STRIP_USELESS_TYPE_CONVERSION (result);
855 if (valid_gimple_rhs_p (result))
858 /* Fold might have produced non-GIMPLE, so if we trust it blindly
859 we lose canonicalization opportunities. Do not go again
860 through fold here though, or the same non-GIMPLE will be
862 if (commutative_ternary_tree_code (subcode)
863 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
864 gimple_assign_rhs2 (stmt), false))
865 return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
866 gimple_assign_rhs2 (stmt),
867 gimple_assign_rhs1 (stmt),
868 gimple_assign_rhs3 (stmt));
872 case GIMPLE_INVALID_RHS:
879 /* Attempt to fold a conditional statement. Return true if any changes were
880 made. We only attempt to fold the condition expression, and do not perform
881 any transformation that would require alteration of the cfg. It is
882 assumed that the operands have been previously folded. */
/* Fold the GIMPLE_COND STMT's condition in place; returns true on any
   change (see comment above -- no CFG alteration is attempted).  */
885 fold_gimple_cond (gimple stmt)
887 tree result = fold_binary_loc (gimple_location (stmt),
888 gimple_cond_code (stmt),
890 gimple_cond_lhs (stmt),
891 gimple_cond_rhs (stmt));
895 STRIP_USELESS_TYPE_CONVERSION (result);
/* Only install the folded tree if it is a valid GIMPLE condition.  */
896 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
898 gimple_cond_set_condition_from_tree (stmt, result);
906 /* Convert EXPR into a GIMPLE value suitable for substitution on the
907 RHS of an assignment. Insert the necessary statements before
908 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
909 is replaced. If the call is expected to produce a result, then it
910 is replaced by an assignment of the new RHS to the result variable.
911 If the result is to be ignored, then the call is replaced by a
912 GIMPLE_NOP. A proper VDEF chain is retained by making the first
913 VUSE and the last VDEF of the whole sequence be the same as the replaced
914 statement and using new SSA names for stores in between. */
/* Replace the GIMPLE_CALL at *SI_P by the gimplification of EXPR,
   keeping the virtual operand (VUSE/VDEF) chain intact -- see the
   comment above for the full contract.  */
917 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
920 tree tmp = NULL_TREE; /* Silence warning. */
921 gimple stmt, new_stmt;
922 gimple_stmt_iterator i;
923 gimple_seq stmts = gimple_seq_alloc();
924 struct gimplify_ctx gctx;
926 gimple laststore = NULL;
929 stmt = gsi_stmt (*si_p);
931 gcc_assert (is_gimple_call (stmt));
933 lhs = gimple_call_lhs (stmt);
934 reaching_vuse = gimple_vuse (stmt);
936 push_gimplify_context (&gctx);
938 if (lhs == NULL_TREE)
940 gimplify_and_add (expr, &stmts);
941 /* We can end up with folding a memcpy of an empty class assignment
942 which gets optimized away by C++ gimplification. */
943 if (gimple_seq_empty_p (stmts))
945 if (gimple_in_ssa_p (cfun))
947 unlink_stmt_vdef (stmt);
950 gsi_remove (si_p, true);
/* With an LHS, gimplify EXPR into a temporary assigned at the end.  */
955 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
957 pop_gimplify_context (NULL);
959 if (gimple_has_location (stmt))
960 annotate_all_with_location (stmts, gimple_location (stmt));
962 /* The replacement can expose previously unreferenced variables. */
963 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
967 gsi_insert_before (si_p, last, GSI_NEW_STMT);
970 new_stmt = gsi_stmt (i);
971 if (gimple_in_ssa_p (cfun))
973 find_new_referenced_vars (new_stmt);
974 mark_symbols_for_renaming (new_stmt);
976 /* If the new statement has a VUSE, update it with exact SSA name we
977 know will reach this one. */
978 if (gimple_vuse (new_stmt))
980 /* If we've also seen a previous store create a new VDEF for
981 the latter one, and make that the new reaching VUSE. */
984 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
985 gimple_set_vdef (laststore, reaching_vuse);
986 update_stmt (laststore);
989 gimple_set_vuse (new_stmt, reaching_vuse);
990 gimple_set_modified (new_stmt, true);
/* Remember the last store in the sequence; its VDEF is fixed up
   after the loop so the chain ends where the old call's did.  */
992 if (gimple_assign_single_p (new_stmt)
993 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
995 laststore = new_stmt;
1000 if (lhs == NULL_TREE)
1002 /* If we replace a call without LHS that has a VDEF and our new
1003 sequence ends with a store we must make that store have the same
1004 vdef in order not to break the sequencing. This can happen
1005 for instance when folding memcpy calls into assignments. */
1006 if (gimple_vdef (stmt) && laststore)
1008 gimple_set_vdef (laststore, gimple_vdef (stmt));
1009 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1010 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
1011 update_stmt (laststore);
1013 else if (gimple_in_ssa_p (cfun))
1015 unlink_stmt_vdef (stmt);
1016 release_defs (stmt);
1024 gsi_insert_before (si_p, last, GSI_NEW_STMT);
1027 if (laststore && is_gimple_reg (lhs))
1029 gimple_set_vdef (laststore, gimple_vdef (stmt));
1030 update_stmt (laststore);
1031 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1032 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
1037 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
1038 gimple_set_vdef (laststore, reaching_vuse);
1039 update_stmt (laststore);
/* Finally replace the old call with LHS = TMP, wiring up the virtual
   operands of the new assignment.  */
1042 new_stmt = gimple_build_assign (lhs, tmp);
1043 if (!is_gimple_reg (tmp))
1044 gimple_set_vuse (new_stmt, reaching_vuse);
1045 if (!is_gimple_reg (lhs))
1047 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1048 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1049 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
1051 else if (reaching_vuse == gimple_vuse (stmt))
1052 unlink_stmt_vdef (stmt);
1055 gimple_set_location (new_stmt, gimple_location (stmt));
1056 gsi_replace (si_p, new_stmt, false);
1059 /* Return the string length, maximum string length or maximum value of
1061 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1062 is not NULL and, for TYPE == 0, its value is not equal to the length
1063 we determine or if we are unable to determine the length or value,
1064 return false. VISITED is a bitmap of visited variables.
1065 TYPE is 0 if string length should be returned, 1 for maximum string
1066 length and 2 for maximum value ARG can have. */
/* Compute the (maximum) string length or maximum value of ARG into
   *LENGTH, following SSA use-def chains; TYPE selects the mode and
   VISITED breaks cycles -- see the comment above for the contract.  */
1069 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
1074 if (TREE_CODE (arg) != SSA_NAME)
/* A COND_EXPR is handled by requiring both arms to agree.  */
1076 if (TREE_CODE (arg) == COND_EXPR)
1077 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
1078 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
1079 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1080 else if (TREE_CODE (arg) == ADDR_EXPR
1081 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1082 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1084 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1085 if (TREE_CODE (aop0) == INDIRECT_REF
1086 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1087 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1088 length, visited, type);
/* Reject negative or non-constant candidate values.  */
1094 if (TREE_CODE (val) != INTEGER_CST
1095 || tree_int_cst_sgn (val) < 0)
1099 val = c_strlen (arg, 1);
/* When a length was already recorded, merge: take the maximum for
   TYPE != 0, require exact equality for TYPE == 0.  */
1107 if (TREE_CODE (*length) != INTEGER_CST
1108 || TREE_CODE (val) != INTEGER_CST)
1111 if (tree_int_cst_lt (*length, val))
1115 else if (simple_cst_equal (val, *length) != 1)
1123 /* If we were already here, break the infinite cycle. */
1124 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1128 def_stmt = SSA_NAME_DEF_STMT (var);
1130 switch (gimple_code (def_stmt))
1133 /* The RHS of the statement defining VAR must either have a
1134 constant length or come from another SSA_NAME with a constant
1136 if (gimple_assign_single_p (def_stmt)
1137 || gimple_assign_unary_nop_p (def_stmt))
1139 tree rhs = gimple_assign_rhs1 (def_stmt);
1140 return get_maxval_strlen (rhs, length, visited, type);
1146 /* All the arguments of the PHI node must have the same constant
1150 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1152 tree arg = gimple_phi_arg (def_stmt, i)->def;
1154 /* If this PHI has itself as an argument, we cannot
1155 determine the string length of this argument. However,
1156 if we can find a constant string length for the other
1157 PHI args then we can still be sure that this is a
1158 constant string length. So be optimistic and just
1159 continue with the next argument. */
1160 if (arg == gimple_phi_result (def_stmt))
1163 if (!get_maxval_strlen (arg, length, visited, type))
1175 /* Fold builtin call in statement STMT. Returns a simplified tree.
1176 We may return a non-constant expression, including another call
1177 to a different function and with different arguments, e.g.,
1178 substituting memcpy for strcpy when the string length is known.
1179 Note that some builtins expand into inline code that may not
1180 be valid in GIMPLE. Callers must take care. */
/* NOTE(review): this chunk is an elided excerpt of the original file;
   lines are missing between the numbered statements below (declarations,
   braces, returns). Comments added here describe only what the visible
   code shows. */
1183 gimple_fold_builtin (gimple stmt)
/* VAL holds the maximal string lengths discovered for up to three call
   arguments; which slot is filled depends on arg_idx (set on elided
   lines per the builtin being folded). */
1185 tree result, val[3];
1191 location_t loc = gimple_location (stmt);
1193 gcc_assert (is_gimple_call (stmt));
/* A call whose LHS is unused may fold more aggressively (see
   fold_ignored_result at the end). */
1195 ignore = (gimple_call_lhs (stmt) == NULL);
1197 /* First try the generic builtin folder. If that succeeds, return the
1199 result = fold_call_stmt (stmt, ignore);
1203 STRIP_NOPS (result);
1207 /* Ignore MD builtins. */
1208 callee = gimple_call_fndecl (stmt);
1209 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1212 /* If the builtin could not be folded, and it has no argument list,
1214 nargs = gimple_call_num_args (stmt);
1218 /* Limit the work only for builtins we know how to simplify. */
/* First switch: select which argument index carries the string whose
   length we need (arg_idx and the expected TYPE are set on elided
   lines of each case). */
1219 switch (DECL_FUNCTION_CODE (callee))
1221 case BUILT_IN_STRLEN:
1222 case BUILT_IN_FPUTS:
1223 case BUILT_IN_FPUTS_UNLOCKED:
1227 case BUILT_IN_STRCPY:
1228 case BUILT_IN_STRNCPY:
1232 case BUILT_IN_MEMCPY_CHK:
1233 case BUILT_IN_MEMPCPY_CHK:
1234 case BUILT_IN_MEMMOVE_CHK:
1235 case BUILT_IN_MEMSET_CHK:
1236 case BUILT_IN_STRNCPY_CHK:
1240 case BUILT_IN_STRCPY_CHK:
1241 case BUILT_IN_STPCPY_CHK:
1245 case BUILT_IN_SNPRINTF_CHK:
1246 case BUILT_IN_VSNPRINTF_CHK:
/* Bail out if the call site does not actually have the argument we
   were told to inspect. */
1254 if (arg_idx >= nargs)
1257 /* Try to use the dataflow information gathered by the CCP process. */
1258 visited = BITMAP_ALLOC (NULL);
1259 bitmap_clear (visited);
1261 memset (val, 0, sizeof (val));
1262 a = gimple_call_arg (stmt, arg_idx);
1263 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
1264 val[arg_idx] = NULL_TREE;
1266 BITMAP_FREE (visited);
/* Second switch: with the discovered length in VAL, delegate to the
   per-builtin folders. Each case also re-checks nargs so a
   wrong-arity call is left alone. */
1269 switch (DECL_FUNCTION_CODE (callee))
1271 case BUILT_IN_STRLEN:
1272 if (val[0] && nargs == 1)
1275 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
1277 /* If the result is not a valid gimple value, or not a cast
1278 of a valid gimple value, then we cannot use the result. */
1279 if (is_gimple_val (new_val)
1280 || (CONVERT_EXPR_P (new_val)
1281 && is_gimple_val (TREE_OPERAND (new_val, 0))))
1286 case BUILT_IN_STRCPY:
1287 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
1288 result = fold_builtin_strcpy (loc, callee,
1289 gimple_call_arg (stmt, 0),
1290 gimple_call_arg (stmt, 1),
1294 case BUILT_IN_STRNCPY:
1295 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1296 result = fold_builtin_strncpy (loc, callee,
1297 gimple_call_arg (stmt, 0),
1298 gimple_call_arg (stmt, 1),
1299 gimple_call_arg (stmt, 2),
1303 case BUILT_IN_FPUTS:
1305 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1306 gimple_call_arg (stmt, 1),
1307 ignore, false, val[0]);
1310 case BUILT_IN_FPUTS_UNLOCKED:
1312 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1313 gimple_call_arg (stmt, 1),
1314 ignore, true, val[0]);
1317 case BUILT_IN_MEMCPY_CHK:
1318 case BUILT_IN_MEMPCPY_CHK:
1319 case BUILT_IN_MEMMOVE_CHK:
1320 case BUILT_IN_MEMSET_CHK:
1321 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1322 result = fold_builtin_memory_chk (loc, callee,
1323 gimple_call_arg (stmt, 0),
1324 gimple_call_arg (stmt, 1),
1325 gimple_call_arg (stmt, 2),
1326 gimple_call_arg (stmt, 3),
1328 DECL_FUNCTION_CODE (callee));
1331 case BUILT_IN_STRCPY_CHK:
1332 case BUILT_IN_STPCPY_CHK:
1333 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1334 result = fold_builtin_stxcpy_chk (loc, callee,
1335 gimple_call_arg (stmt, 0),
1336 gimple_call_arg (stmt, 1),
1337 gimple_call_arg (stmt, 2),
1339 DECL_FUNCTION_CODE (callee));
1342 case BUILT_IN_STRNCPY_CHK:
1343 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1344 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
1345 gimple_call_arg (stmt, 1),
1346 gimple_call_arg (stmt, 2),
1347 gimple_call_arg (stmt, 3),
1351 case BUILT_IN_SNPRINTF_CHK:
1352 case BUILT_IN_VSNPRINTF_CHK:
1353 if (val[1] && is_gimple_val (val[1]))
1354 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1355 DECL_FUNCTION_CODE (callee));
/* If the call's value is unused, simplify away side-effect-free parts
   of the folded expression. */
1362 if (result && ignore)
1363 result = fold_ignored_result (result);
1367 /* Search for a base binfo of BINFO that corresponds to TYPE and return it if
1368 it is found or NULL_TREE if it is not. */
/* NOTE(review): elided excerpt — the return-type line, the loop body that
   sets RES, and the final return are on lines not shown here. */
1371 get_base_binfo_for_type (tree binfo, tree type)
1375 tree res = NULL_TREE;
/* Linear scan over the direct base binfos of BINFO, matching by exact
   type identity (pointer equality of the TREE_TYPE). */
1377 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1378 if (TREE_TYPE (base_binfo) == type)
1387 /* Return a binfo describing the part of object referenced by expression REF.
1388 Return NULL_TREE if it cannot be determined. REF can consist of a series of
1389 COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just
1390 a simple declaration, indirect reference or an SSA_NAME. If the function
1391 discovers an INDIRECT_REF or an SSA_NAME, it will assume that the
1392 encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
1393 Otherwise the first non-artificial field declaration or the base declaration
1394 will be examined to get the encapsulating type. */
/* NOTE(review): elided excerpt — the enclosing loop construct and several
   returns are on lines not shown. The visible logic walks COMPONENT_REF
   chains outward via TREE_OPERAND (ref, 0). */
1397 gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
1401 if (TREE_CODE (ref) == COMPONENT_REF)
1405 tree field = TREE_OPERAND (ref, 1);
/* A user-written (non-artificial) field of record type directly gives
   us its own binfo. */
1407 if (!DECL_ARTIFICIAL (field))
1409 tree type = TREE_TYPE (field);
1410 if (TREE_CODE (type) == RECORD_TYPE)
1411 return TYPE_BINFO (type);
1416 par_type = TREE_TYPE (TREE_OPERAND (ref, 0));
1417 binfo = TYPE_BINFO (par_type);
1419 || BINFO_N_BASE_BINFOS (binfo) == 0)
1422 /* Offset 0 indicates the primary base, whose vtable contents are
1423 represented in the binfo for the derived class. */
1424 if (int_bit_position (field) != 0)
1428 /* Get descendant binfo. */
1429 d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0),
1433 return get_base_binfo_for_type (d_binfo, TREE_TYPE (field));
/* Peel one level of COMPONENT_REF and keep walking outward. */
1436 ref = TREE_OPERAND (ref, 0);
1438 else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE)
1439 return TYPE_BINFO (TREE_TYPE (ref));
/* For SSA names and memory references we cannot see the object's true
   type here; fall back to the caller-provided KNOWN_BINFO. */
1440 else if (known_binfo
1441 && (TREE_CODE (ref) == SSA_NAME
1442 || TREE_CODE (ref) == MEM_REF))
1449 /* Fold a OBJ_TYPE_REF expression to the address of a function. TOKEN is
1450 integer form of OBJ_TYPE_REF_TOKEN of the reference expression. KNOWN_BINFO
1451 carries the binfo describing the true type of OBJ_TYPE_REF_OBJECT(REF). */
/* NOTE(review): elided excerpt — the loop that advances V by TOKEN entries
   and several early-return checks are on lines not shown. */
1454 gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo)
1457 tree v, fndecl, delta;
/* Walk the virtual-function list of the binfo to the slot selected by
   TOKEN. */
1459 v = BINFO_VIRTUALS (known_binfo);
/* Each vtable slot occupies TARGET_VTABLE_USES_DESCRIPTORS entries when
   descriptors are in use, otherwise one entry. */
1463 i += (TARGET_VTABLE_USES_DESCRIPTORS
1464 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
1468 fndecl = TREE_VALUE (v);
1469 delta = TREE_PURPOSE (v);
1470 gcc_assert (host_integerp (delta, 0));
/* A nonzero this-pointer adjustment means the slot really points at a
   thunk; look it up among the function's same-body aliases. */
1472 if (integer_nonzerop (delta))
1474 struct cgraph_node *node = cgraph_get_node (fndecl);
1475 HOST_WIDE_INT off = tree_low_cst (delta, 0);
1479 for (node = node->same_body; node; node = node->next)
1480 if (node->thunk.thunk_p && off == node->thunk.fixed_offset)
1483 fndecl = node->decl;
1488 /* When cgraph node is missing and function is not public, we cannot
1489 devirtualize. This can happen in WHOPR when the actual method
1490 ends up in other partition, because we found devirtualization
1491 possibility too late. */
1492 if (!can_refer_decl_in_current_unit_p (fndecl))
1494 return build_fold_addr_expr (fndecl);
1498 /* Fold a OBJ_TYPE_REF expression to the address of a function. If KNOWN_TYPE
1499 is not NULL_TREE, it is the true type of the outmost encapsulating object if
1500 that comes from a pointer SSA_NAME. If the true outmost encapsulating type
1501 can be determined from a declaration OBJ_TYPE_REF_OBJECT(REF), it is used
1502 regardless of KNOWN_TYPE (which thus can be NULL_TREE). */
/* NOTE(review): elided excerpt — the failure-path return (NULL_TREE,
   presumably) is on lines not shown. */
1505 gimple_fold_obj_type_ref (tree ref, tree known_type)
1507 tree obj = OBJ_TYPE_REF_OBJECT (ref);
1508 tree known_binfo = known_type ? TYPE_BINFO (known_type) : NULL_TREE;
/* Strip the address-of so we inspect the object itself. */
1511 if (TREE_CODE (obj) == ADDR_EXPR)
1512 obj = TREE_OPERAND (obj, 0);
1514 binfo = gimple_get_relevant_ref_binfo (obj, known_binfo);
1517 HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
1518 /* If there is no virtual methods leave the OBJ_TYPE_REF alone. */
1519 if (!BINFO_VIRTUALS (binfo))
1521 return gimple_fold_obj_type_ref_known_binfo (token, binfo);
1527 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1528 The statement may be replaced by another statement, e.g., if the call
1529 simplifies to a constant value. Return true if any changes were made.
1530 It is assumed that the operands have been previously folded. */
/* NOTE(review): elided excerpt — returns and some intermediate checks are
   on lines not shown. */
1533 fold_gimple_call (gimple_stmt_iterator *gsi, bool inplace)
1535 gimple stmt = gsi_stmt (*gsi);
1537 tree callee = gimple_call_fndecl (stmt);
1539 /* Check for builtins that CCP can handle using information not
1540 available in the generic fold routines. */
/* Builtin folding can replace the statement wholesale, so it is only
   attempted when not folding in place. */
1541 if (!inplace && callee && DECL_BUILT_IN (callee))
1543 tree result = gimple_fold_builtin (stmt);
/* Prefer the cheap in-place update; fall back to re-gimplifying the
   folded tree when it is not a valid direct replacement. */
1547 if (!update_call_from_tree (gsi, result))
1548 gimplify_and_update_call_from_tree (gsi, result);
1554 /* ??? Should perhaps do this in fold proper. However, doing it
1555 there requires that we create a new CALL_EXPR, and that requires
1556 copying EH region info to the new node. Easier to just do it
1557 here where we can just smash the call operand. */
1558 callee = gimple_call_fn (stmt);
1559 if (TREE_CODE (callee) == OBJ_TYPE_REF
1560 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR)
1564 t = gimple_fold_obj_type_ref (callee, NULL_TREE);
/* Devirtualization succeeded: replace the indirect callee with the
   direct function address. */
1567 gimple_call_set_fn (stmt, t);
1576 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1577 distinguishes both cases. */
/* NOTE(review): elided excerpt — null checks, `changed = true` updates and
   the final return are on lines not shown. */
1580 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1582 bool changed = false;
1583 gimple stmt = gsi_stmt (*gsi);
1586 /* Fold the main computation performed by the statement. */
1587 switch (gimple_code (stmt))
/* GIMPLE_ASSIGN: fold the RHS; only install the new RHS when it fits
   the statement (in-place folding must not grow the operand count). */
1591 unsigned old_num_ops = gimple_num_ops (stmt);
1592 tree new_rhs = fold_gimple_assign (gsi);
1593 tree lhs = gimple_assign_lhs (stmt);
1595 && !useless_type_conversion_p (TREE_TYPE (lhs),
1596 TREE_TYPE (new_rhs)))
1597 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1600 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1602 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1609 changed |= fold_gimple_cond (stmt);
1613 /* Fold *& in call arguments. */
1614 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1615 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1617 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1620 gimple_call_set_arg (stmt, i, tmp);
1624 changed |= fold_gimple_call (gsi, inplace);
1628 /* Fold *& in asm operands. */
/* Outputs are lvalues (is_lhs = true); inputs are rvalues (false). */
1629 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1631 tree link = gimple_asm_output_op (stmt, i);
1632 tree op = TREE_VALUE (link);
1633 if (REFERENCE_CLASS_P (op)
1634 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1636 TREE_VALUE (link) = op;
1640 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1642 tree link = gimple_asm_input_op (stmt, i);
1643 tree op = TREE_VALUE (link);
1644 if (REFERENCE_CLASS_P (op)
1645 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
1647 TREE_VALUE (link) = op;
/* Debug binds: fold *& in the bound value only; never changes
   generated code. */
1654 if (gimple_debug_bind_p (stmt))
1656 tree val = gimple_debug_bind_get_value (stmt);
1658 && REFERENCE_CLASS_P (val))
1660 tree tem = maybe_fold_reference (val, false);
1663 gimple_debug_bind_set_value (stmt, tem);
/* The statement may have been replaced above; re-fetch it before
   touching the LHS. */
1673 stmt = gsi_stmt (*gsi);
1675 /* Fold *& on the lhs. */
1676 if (gimple_has_lhs (stmt))
1678 tree lhs = gimple_get_lhs (stmt);
1679 if (lhs && REFERENCE_CLASS_P (lhs))
1681 tree new_lhs = maybe_fold_reference (lhs, true);
1684 gimple_set_lhs (stmt, new_lhs);
1693 /* Fold the statement pointed to by GSI. In some cases, this function may
1694 replace the whole statement with a new one. Returns true iff folding
1696 The statement pointed to by GSI should be in valid gimple form but may
1697 be in unfolded state as resulting from for example constant propagation
1698 which can produce *&x = 0. */
/* Thin wrapper: full folding (statement replacement allowed). */
1701 fold_stmt (gimple_stmt_iterator *gsi)
1703 return fold_stmt_1 (gsi, false);
1706 /* Perform the minimal folding on statement STMT. Only operations like
1707 *&x created by constant propagation are handled. The statement cannot
1708 be replaced with a new one. Return true if the statement was
1709 changed, false otherwise.
1710 The statement STMT should be in valid gimple form but may
1711 be in unfolded state as resulting from for example constant propagation
1712 which can produce *&x = 0. */
/* Thin wrapper: in-place folding; the assert verifies the statement was
   indeed not replaced. */
1715 fold_stmt_inplace (gimple stmt)
1717 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1718 bool changed = fold_stmt_1 (&gsi, true);
1719 gcc_assert (gsi_stmt (gsi) == stmt);
1723 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1724 if EXPR is null or we don't know how.
1725 If non-null, the result always has boolean type. */
/* NOTE(review): elided excerpt — the if/else that selects the inverted
   branch and the trailing return are on lines not shown. The first group
   below handles INVERT == true, the second the plain case. */
1728 canonicalize_bool (tree expr, bool invert)
/* Inverted: constants flip, SSA names become (expr == 0), comparisons
   get their code inverted (false => not a floating-point context per
   invert_tree_comparison's second argument). */
1734 if (integer_nonzerop (expr))
1735 return boolean_false_node;
1736 else if (integer_zerop (expr))
1737 return boolean_true_node;
1738 else if (TREE_CODE (expr) == SSA_NAME)
1739 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1740 build_int_cst (TREE_TYPE (expr), 0));
1741 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1742 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1744 TREE_OPERAND (expr, 0),
1745 TREE_OPERAND (expr, 1));
/* Non-inverted: already-boolean expressions pass through re-built in
   boolean_type_node; SSA names become (expr != 0). */
1751 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1753 if (integer_nonzerop (expr))
1754 return boolean_true_node;
1755 else if (integer_zerop (expr))
1756 return boolean_false_node;
1757 else if (TREE_CODE (expr) == SSA_NAME)
1758 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1759 build_int_cst (TREE_TYPE (expr), 0));
1760 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1761 return fold_build2 (TREE_CODE (expr),
1763 TREE_OPERAND (expr, 0),
1764 TREE_OPERAND (expr, 1));
1770 /* Check to see if a boolean expression EXPR is logically equivalent to the
1771 comparison (OP1 CODE OP2). Check for various identities involving
/* NOTE(review): elided excerpt — `return true` / `return false` lines are
   not shown between the visible conditions. */
1775 same_bool_comparison_p (const_tree expr, enum tree_code code,
1776 const_tree op1, const_tree op2)
1780 /* The obvious case. */
1781 if (TREE_CODE (expr) == code
1782 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1783 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1786 /* Check for comparing (name, name != 0) and the case where expr
1787 is an SSA_NAME with a definition matching the comparison. */
1788 if (TREE_CODE (expr) == SSA_NAME
1789 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
/* A boolean NAME equals (NAME != 0) and (NAME == nonzero). */
1791 if (operand_equal_p (expr, op1, 0))
1792 return ((code == NE_EXPR && integer_zerop (op2))
1793 || (code == EQ_EXPR && integer_nonzerop (op2)));
/* Otherwise look through EXPR's defining assignment. */
1794 s = SSA_NAME_DEF_STMT (expr);
1795 if (is_gimple_assign (s)
1796 && gimple_assign_rhs_code (s) == code
1797 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1798 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1802 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1803 of name is a comparison, recurse. */
1804 if (TREE_CODE (op1) == SSA_NAME
1805 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1807 s = SSA_NAME_DEF_STMT (op1);
1808 if (is_gimple_assign (s)
1809 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1811 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) / (name == nonzero): same polarity — recurse with C. */
1812 if ((c == NE_EXPR && integer_zerop (op2))
1813 || (c == EQ_EXPR && integer_nonzerop (op2)))
1814 return same_bool_comparison_p (expr, c,
1815 gimple_assign_rhs1 (s),
1816 gimple_assign_rhs2 (s));
/* (name == 0) / (name != nonzero): inverted polarity — recurse with
   the inverse of C. */
1817 if ((c == EQ_EXPR && integer_zerop (op2))
1818 || (c == NE_EXPR && integer_nonzerop (op2)))
1819 return same_bool_comparison_p (expr,
1820 invert_tree_comparison (c, false),
1821 gimple_assign_rhs1 (s),
1822 gimple_assign_rhs2 (s));
1828 /* Check to see if two boolean expressions OP1 and OP2 are logically
/* NOTE(review): elided excerpt — `return true` / `return false` lines are
   not shown after each visible condition. */
1832 same_bool_result_p (const_tree op1, const_tree op2)
1834 /* Simple cases first. */
1835 if (operand_equal_p (op1, op2, 0))
1838 /* Check the cases where at least one of the operands is a comparison.
1839 These are a bit smarter than operand_equal_p in that they apply some
1840 identifies on SSA_NAMEs. */
/* Symmetric check: try each operand as the comparison side. */
1841 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1842 && same_bool_comparison_p (op1, TREE_CODE (op2),
1843 TREE_OPERAND (op2, 0),
1844 TREE_OPERAND (op2, 1)))
1846 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1847 && same_bool_comparison_p (op2, TREE_CODE (op1),
1848 TREE_OPERAND (op1, 0),
1849 TREE_OPERAND (op1, 1)))
1856 /* Forward declarations for some mutually recursive functions. */
/* NOTE(review): the return-type/storage-class lines (presumably
   `static tree`) for these prototypes fall on elided lines. */
1859 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1860 enum tree_code code2, tree op2a, tree op2b);
1862 and_var_with_comparison (tree var, bool invert,
1863 enum tree_code code2, tree op2a, tree op2b);
1865 and_var_with_comparison_1 (gimple stmt,
1866 enum tree_code code2, tree op2a, tree op2b);
1868 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1869 enum tree_code code2, tree op2a, tree op2b);
1871 or_var_with_comparison (tree var, bool invert,
1872 enum tree_code code2, tree op2a, tree op2b);
1874 or_var_with_comparison_1 (gimple stmt,
1875 enum tree_code code2, tree op2a, tree op2b);
1877 /* Helper function for and_comparisons_1: try to simplify the AND of the
1878 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1879 If INVERT is true, invert the value of the VAR before doing the AND.
1880 Return NULL_EXPR if we can't simplify this to a single expression. */
/* NOTE(review): elided excerpt — the `if (invert)` selecting between the
   two calls below is on lines not shown. */
1883 and_var_with_comparison (tree var, bool invert,
1884 enum tree_code code2, tree op2a, tree op2b)
1887 gimple stmt = SSA_NAME_DEF_STMT (var);
1889 /* We can only deal with variables whose definitions are assignments. */
1890 if (!is_gimple_assign (stmt))
1893 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1894 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1895 Then we only have to consider the simpler non-inverted cases. */
1897 t = or_var_with_comparison_1 (stmt,
1898 invert_tree_comparison (code2, false),
1901 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* Re-canonicalize (and re-invert when DeMorgan was applied) so the
   result, if any, has boolean type. */
1902 return canonicalize_bool (t, invert);
1905 /* Try to simplify the AND of the ssa variable defined by the assignment
1906 STMT with the comparison specified by (OP2A CODE2 OP2B).
1907 Return NULL_EXPR if we can't simplify this to a single expression. */
/* NOTE(review): elided excerpt — some returns, else-branches and the
   `if (is_and)` / `else` selectors around the partial-result handling
   are on lines not shown. */
1910 and_var_with_comparison_1 (gimple stmt,
1911 enum tree_code code2, tree op2a, tree op2b)
1913 tree var = gimple_assign_lhs (stmt);
1914 tree true_test_var = NULL_TREE;
1915 tree false_test_var = NULL_TREE;
1916 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1918 /* Check for identities like (var AND (var == 0)) => false. */
/* Classify the second comparison as a truth test of OP2A:
   true_test_var when it tests OP2A for nonzero, false_test_var when
   it tests OP2A for zero. */
1919 if (TREE_CODE (op2a) == SSA_NAME
1920 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1922 if ((code2 == NE_EXPR && integer_zerop (op2b))
1923 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1925 true_test_var = op2a;
1926 if (var == true_test_var)
1929 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1930 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1932 false_test_var = op2a;
1933 if (var == false_test_var)
1934 return boolean_false_node;
1938 /* If the definition is a comparison, recurse on it. */
1939 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1941 tree t = and_comparisons_1 (innercode,
1942 gimple_assign_rhs1 (stmt),
1943 gimple_assign_rhs2 (stmt),
1951 /* If the definition is an AND or OR expression, we may be able to
1952 simplify by reassociating. */
1953 if (innercode == TRUTH_AND_EXPR
1954 || innercode == TRUTH_OR_EXPR
1955 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1956 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
1958 tree inner1 = gimple_assign_rhs1 (stmt);
1959 tree inner2 = gimple_assign_rhs2 (stmt);
1962 tree partial = NULL_TREE;
1963 bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);
1965 /* Check for boolean identities that don't require recursive examination
1967 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1968 inner1 AND (inner1 OR inner2) => inner1
1969 !inner1 AND (inner1 AND inner2) => false
1970 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1971 Likewise for similar cases involving inner2. */
1972 if (inner1 == true_test_var)
1973 return (is_and ? var : inner1);
1974 else if (inner2 == true_test_var)
1975 return (is_and ? var : inner2);
1976 else if (inner1 == false_test_var)
1978 ? boolean_false_node
1979 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1980 else if (inner2 == false_test_var)
1982 ? boolean_false_node
1983 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1985 /* Next, redistribute/reassociate the AND across the inner tests.
1986 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1987 if (TREE_CODE (inner1) == SSA_NAME
1988 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1989 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1990 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1991 gimple_assign_rhs1 (s),
1992 gimple_assign_rhs2 (s),
1993 code2, op2a, op2b)))
1995 /* Handle the AND case, where we are reassociating:
1996 (inner1 AND inner2) AND (op2a code2 op2b)
1998 If the partial result t is a constant, we win. Otherwise
1999 continue on to try reassociating with the other inner test. */
2002 if (integer_onep (t))
2004 else if (integer_zerop (t))
2005 return boolean_false_node;
2008 /* Handle the OR case, where we are redistributing:
2009 (inner1 OR inner2) AND (op2a code2 op2b)
2010 => (t OR (inner2 AND (op2a code2 op2b))) */
2013 if (integer_onep (t))
2014 return boolean_true_node;
2016 /* Save partial result for later. */
2021 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
2022 if (TREE_CODE (inner2) == SSA_NAME
2023 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2024 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2025 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
2026 gimple_assign_rhs1 (s),
2027 gimple_assign_rhs2 (s),
2028 code2, op2a, op2b)))
2030 /* Handle the AND case, where we are reassociating:
2031 (inner1 AND inner2) AND (op2a code2 op2b)
2032 => (inner1 AND t) */
2035 if (integer_onep (t))
2037 else if (integer_zerop (t))
2038 return boolean_false_node;
2041 /* Handle the OR case. where we are redistributing:
2042 (inner1 OR inner2) AND (op2a code2 op2b)
2043 => (t OR (inner1 AND (op2a code2 op2b)))
2044 => (t OR partial) */
2047 if (integer_onep (t))
2048 return boolean_true_node;
2051 /* We already got a simplification for the other
2052 operand to the redistributed OR expression. The
2053 interesting case is when at least one is false.
2054 Or, if both are the same, we can apply the identity
2056 if (integer_zerop (partial))
2058 else if (integer_zerop (t))
2060 else if (same_bool_result_p (t, partial))
2069 /* Try to simplify the AND of two comparisons defined by
2070 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2071 If this can be done without constructing an intermediate value,
2072 return the resulting tree; otherwise NULL_TREE is returned.
2073 This function is deliberately asymmetric as it recurses on SSA_DEFs
2074 in the first comparison but not the second. */
/* NOTE(review): elided excerpt — several returns, declarations and
   brace/else lines are on lines not shown. */
2077 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2078 enum tree_code code2, tree op2a, tree op2b)
2080 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
2081 if (operand_equal_p (op1a, op2a, 0)
2082 && operand_equal_p (op1b, op2b, 0))
2084 tree t = combine_comparisons (UNKNOWN_LOCATION,
2085 TRUTH_ANDIF_EXPR, code1, code2,
2086 boolean_type_node, op1a, op1b);
2091 /* Likewise the swapped case of the above. */
2092 if (operand_equal_p (op1a, op2b, 0)
2093 && operand_equal_p (op1b, op2a, 0))
2095 tree t = combine_comparisons (UNKNOWN_LOCATION,
2096 TRUTH_ANDIF_EXPR, code1,
2097 swap_tree_comparison (code2),
2098 boolean_type_node, op1a, op1b);
2103 /* If both comparisons are of the same value against constants, we might
2104 be able to merge them. */
2105 if (operand_equal_p (op1a, op2a, 0)
2106 && TREE_CODE (op1b) == INTEGER_CST
2107 && TREE_CODE (op2b) == INTEGER_CST)
/* CMP orders the two constants: <0 / 0 / >0 for op1b <,==,> op2b. */
2109 int cmp = tree_int_cst_compare (op1b, op2b);
2111 /* If we have (op1a == op1b), we should either be able to
2112 return that or FALSE, depending on whether the constant op1b
2113 also satisfies the other comparison against op2b. */
2114 if (code1 == EQ_EXPR)
2120 case EQ_EXPR: val = (cmp == 0); break;
2121 case NE_EXPR: val = (cmp != 0); break;
2122 case LT_EXPR: val = (cmp < 0); break;
2123 case GT_EXPR: val = (cmp > 0); break;
2124 case LE_EXPR: val = (cmp <= 0); break;
2125 case GE_EXPR: val = (cmp >= 0); break;
2126 default: done = false;
2131 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2133 return boolean_false_node;
2136 /* Likewise if the second comparison is an == comparison. */
2137 else if (code2 == EQ_EXPR)
/* Mirror of the case above; comparisons are flipped because CMP still
   orders op1b relative to op2b. */
2143 case EQ_EXPR: val = (cmp == 0); break;
2144 case NE_EXPR: val = (cmp != 0); break;
2145 case LT_EXPR: val = (cmp > 0); break;
2146 case GT_EXPR: val = (cmp < 0); break;
2147 case LE_EXPR: val = (cmp >= 0); break;
2148 case GE_EXPR: val = (cmp <= 0); break;
2149 default: done = false;
2154 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2156 return boolean_false_node;
2160 /* Same business with inequality tests. */
2161 else if (code1 == NE_EXPR)
2166 case EQ_EXPR: val = (cmp != 0); break;
2167 case NE_EXPR: val = (cmp == 0); break;
2168 case LT_EXPR: val = (cmp >= 0); break;
2169 case GT_EXPR: val = (cmp <= 0); break;
2170 case LE_EXPR: val = (cmp > 0); break;
2171 case GE_EXPR: val = (cmp < 0); break;
2176 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2178 else if (code2 == NE_EXPR)
2183 case EQ_EXPR: val = (cmp == 0); break;
2184 case NE_EXPR: val = (cmp != 0); break;
2185 case LT_EXPR: val = (cmp <= 0); break;
2186 case GT_EXPR: val = (cmp >= 0); break;
2187 case LE_EXPR: val = (cmp < 0); break;
2188 case GE_EXPR: val = (cmp > 0); break;
2193 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2196 /* Chose the more restrictive of two < or <= comparisons. */
2197 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2198 && (code2 == LT_EXPR || code2 == LE_EXPR))
2200 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2201 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2203 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2206 /* Likewise chose the more restrictive of two > or >= comparisons. */
2207 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2208 && (code2 == GT_EXPR || code2 == GE_EXPR))
2210 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2211 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2213 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2216 /* Check for singleton ranges. */
/* (x <= c AND x >= c) collapses to x == c (the cmp == 0 guard is on an
   elided line, presumably — TODO confirm). */
2218 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2219 || (code1 == GE_EXPR && code2 == LE_EXPR)))
2220 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2222 /* Check for disjoint ranges. */
2224 && (code1 == LT_EXPR || code1 == LE_EXPR)
2225 && (code2 == GT_EXPR || code2 == GE_EXPR))
2226 return boolean_false_node;
2228 && (code1 == GT_EXPR || code1 == GE_EXPR)
2229 && (code2 == LT_EXPR || code2 == LE_EXPR))
2230 return boolean_false_node;
2233 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2234 NAME's definition is a truth value. See if there are any simplifications
2235 that can be done against the NAME's definition. */
2236 if (TREE_CODE (op1a) == SSA_NAME
2237 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2238 && (integer_zerop (op1b) || integer_onep (op1b)))
2240 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2241 || (code1 == NE_EXPR && integer_onep (op1b)));
2242 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2243 switch (gimple_code (stmt))
2246 /* Try to simplify by copy-propagating the definition. */
2247 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2250 /* If every argument to the PHI produces the same result when
2251 ANDed with the second comparison, we win.
2252 Do not do this unless the type is bool since we need a bool
2253 result here anyway. */
2254 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2256 tree result = NULL_TREE;
2258 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2260 tree arg = gimple_phi_arg_def (stmt, i);
2262 /* If this PHI has itself as an argument, ignore it.
2263 If all the other args produce the same result,
2265 if (arg == gimple_phi_result (stmt))
2267 else if (TREE_CODE (arg) == INTEGER_CST)
2269 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2272 result = boolean_false_node;
2273 else if (!integer_zerop (result))
2277 result = fold_build2 (code2, boolean_type_node,
2279 else if (!same_bool_comparison_p (result,
2283 else if (TREE_CODE (arg) == SSA_NAME)
2285 tree temp = and_var_with_comparison (arg, invert,
2291 else if (!same_bool_result_p (result, temp))
2307 /* Try to simplify the AND of two comparisons, specified by
2308 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2309 If this can be simplified to a single expression (without requiring
2310 introducing more SSA variables to hold intermediate values),
2311 return the resulting tree. Otherwise return NULL_TREE.
2312 If the result expression is non-null, it has boolean type. */
/* Public entry point: and_comparisons_1 only recurses on its first
   operand, so try both orders. */
2315 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2316 enum tree_code code2, tree op2a, tree op2b)
2318 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2322 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2325 /* Helper function for or_comparisons_1: try to simplify the OR of the
2326 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2327 If INVERT is true, invert the value of VAR before doing the OR.
2328 Return NULL_EXPR if we can't simplify this to a single expression. */
/* NOTE(review): elided excerpt — the `if (invert)` selecting between the
   two calls below is on lines not shown. Mirror of
   and_var_with_comparison. */
2331 or_var_with_comparison (tree var, bool invert,
2332 enum tree_code code2, tree op2a, tree op2b)
2335 gimple stmt = SSA_NAME_DEF_STMT (var);
2337 /* We can only deal with variables whose definitions are assignments. */
2338 if (!is_gimple_assign (stmt))
2341 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2342 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2343 Then we only have to consider the simpler non-inverted cases. */
2345 t = and_var_with_comparison_1 (stmt,
2346 invert_tree_comparison (code2, false),
2349 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* Re-canonicalize (and re-invert when DeMorgan was applied) so the
   result, if any, has boolean type. */
2350 return canonicalize_bool (t, invert);
2353 /* Try to simplify the OR of the ssa variable defined by the assignment
2354 STMT with the comparison specified by (OP2A CODE2 OP2B).
2355 Return NULL_EXPR if we can't simplify this to a single expression. */
/* NOTE(review): the embedded line numbering in this chunk jumps
   (e.g. 2355 -> 2358, 2364 -> 2366), so the return-type line, several
   braces/returns and some declarations of the original function are not
   visible here.  The comments added below describe only what the
   visible code establishes; verify against the unelided original.  */
2358 or_var_with_comparison_1 (gimple stmt,
2359 enum tree_code code2, tree op2a, tree op2b)
   /* VAR is the value defined by STMT; INNERCODE is the operation
      computing it.  */
2361 tree var = gimple_assign_lhs (stmt);
2362 tree true_test_var = NULL_TREE;
2363 tree false_test_var = NULL_TREE;
2364 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2366 /* Check for identities like (var OR (var != 0)) => true . */
2367 if (TREE_CODE (op2a) == SSA_NAME
2368 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
   /* (op2a != 0) or (op2a == 1): the comparison tests that OP2A is true.  */
2370 if ((code2 == NE_EXPR && integer_zerop (op2b))
2371 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2373 true_test_var = op2a;
2374 if (var == true_test_var)
   /* (op2a == 0) or (op2a != 1): the comparison tests that OP2A is false.  */
2377 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2378 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2380 false_test_var = op2a;
2381 if (var == false_test_var)
   /* var OR !var is always true.  */
2382 return boolean_true_node;
2386 /* If the definition is a comparison, recurse on it. */
2387 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2389 tree t = or_comparisons_1 (innercode,
2390 gimple_assign_rhs1 (stmt),
2391 gimple_assign_rhs2 (stmt),
2399 /* If the definition is an AND or OR expression, we may be able to
2400 simplify by reassociating. */
2401 if (innercode == TRUTH_AND_EXPR
2402 || innercode == TRUTH_OR_EXPR
2403 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2404 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2406 tree inner1 = gimple_assign_rhs1 (stmt);
2407 tree inner2 = gimple_assign_rhs2 (stmt);
2410 tree partial = NULL_TREE;
   /* On boolean types, BIT_IOR is treated like TRUTH_OR below.  */
2411 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2413 /* Check for boolean identities that don't require recursive examination
2415 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2416 inner1 OR (inner1 AND inner2) => inner1
2417 !inner1 OR (inner1 OR inner2) => true
2418 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2420 if (inner1 == true_test_var)
2421 return (is_or ? var : inner1);
2422 else if (inner2 == true_test_var)
2423 return (is_or ? var : inner2);
2424 else if (inner1 == false_test_var)
2427 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2428 else if (inner2 == false_test_var)
2431 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2433 /* Next, redistribute/reassociate the OR across the inner tests.
2434 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2435 if (TREE_CODE (inner1) == SSA_NAME
2436 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2437 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2438 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2439 gimple_assign_rhs1 (s),
2440 gimple_assign_rhs2 (s),
2441 code2, op2a, op2b)))
2443 /* Handle the OR case, where we are reassociating:
2444 (inner1 OR inner2) OR (op2a code2 op2b)
2446 If the partial result t is a constant, we win. Otherwise
2447 continue on to try reassociating with the other inner test. */
2448 if (innercode == TRUTH_OR_EXPR)
2450 if (integer_onep (t))
2451 return boolean_true_node;
2452 else if (integer_zerop (t))
2456 /* Handle the AND case, where we are redistributing:
2457 (inner1 AND inner2) OR (op2a code2 op2b)
2458 => (t AND (inner2 OR (op2a code op2b))) */
2461 if (integer_zerop (t))
2462 return boolean_false_node;
2464 /* Save partial result for later. */
2469 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2470 if (TREE_CODE (inner2) == SSA_NAME
2471 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2472 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2473 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2474 gimple_assign_rhs1 (s),
2475 gimple_assign_rhs2 (s),
2476 code2, op2a, op2b)))
2478 /* Handle the OR case, where we are reassociating:
2479 (inner1 OR inner2) OR (op2a code2 op2b)
2481 if (innercode == TRUTH_OR_EXPR)
2483 if (integer_zerop (t))
2485 else if (integer_onep (t))
2486 return boolean_true_node;
2489 /* Handle the AND case, where we are redistributing:
2490 (inner1 AND inner2) OR (op2a code2 op2b)
2491 => (t AND (inner1 OR (op2a code2 op2b)))
2492 => (t AND partial) */
2495 if (integer_zerop (t))
2496 return boolean_false_node;
2499 /* We already got a simplification for the other
2500 operand to the redistributed AND expression. The
2501 interesting case is when at least one is true.
2502 Or, if both are the same, we can apply the identity
2503 (x AND x) == true. */
   /* NOTE(review): "(x AND x) == true" is not a valid identity in
      general; upstream GCC words this as "(x AND x) == x" and returns T
      on the same_bool_result_p branch.  The visible code below returns
      boolean_true_node instead -- confirm against the unelided original
      whether this is a transcription artifact or a real divergence.  */
2504 if (integer_onep (partial))
2506 else if (integer_onep (t))
2508 else if (same_bool_result_p (t, partial))
2509 return boolean_true_node;
2517 /* Try to simplify the OR of two comparisons defined by
2518 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2519 If this can be done without constructing an intermediate value,
2520 return the resulting tree; otherwise NULL_TREE is returned.
2521 This function is deliberately asymmetric as it recurses on SSA_DEFs
2522 in the first comparison but not the second. */
/* NOTE(review): the embedded numbering in this chunk jumps (2522 -> 2525,
   2557 -> 2559, ...), so the return-type line, braces, the 'done'/'val'
   declarations for the switches, and several returns of the original are
   not visible here.  Added comments describe only the visible code.  */
2525 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2526 enum tree_code code2, tree op2a, tree op2b)
2528 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2529 if (operand_equal_p (op1a, op2a, 0)
2530 && operand_equal_p (op1b, op2b, 0))
   /* Identical operands: merge the two comparison codes directly.  */
2532 tree t = combine_comparisons (UNKNOWN_LOCATION,
2533 TRUTH_ORIF_EXPR, code1, code2,
2534 boolean_type_node, op1a, op1b);
2539 /* Likewise the swapped case of the above. */
2540 if (operand_equal_p (op1a, op2b, 0)
2541 && operand_equal_p (op1b, op2a, 0))
2543 tree t = combine_comparisons (UNKNOWN_LOCATION,
2544 TRUTH_ORIF_EXPR, code1,
2545 swap_tree_comparison (code2),
2546 boolean_type_node, op1a, op1b);
2551 /* If both comparisons are of the same value against constants, we might
2552 be able to merge them. */
2553 if (operand_equal_p (op1a, op2a, 0)
2554 && TREE_CODE (op1b) == INTEGER_CST
2555 && TREE_CODE (op2b) == INTEGER_CST)
   /* CMP orders the constants: negative/zero/positive for
      OP1B less than / equal to / greater than OP2B.  */
2557 int cmp = tree_int_cst_compare (op1b, op2b);
2559 /* If we have (op1a != op1b), we should either be able to
2560 return that or TRUE, depending on whether the constant op1b
2561 also satisfies the other comparison against op2b. */
2562 if (code1 == NE_EXPR)
   /* val := does the constant OP1B itself satisfy (x CODE2 OP2B)?
      NOTE(review): the 'done'/'val' declarations and the if (done)
      test around this switch are on elided lines.  */
2568 case EQ_EXPR: val = (cmp == 0); break;
2569 case NE_EXPR: val = (cmp != 0); break;
2570 case LT_EXPR: val = (cmp < 0); break;
2571 case GT_EXPR: val = (cmp > 0); break;
2572 case LE_EXPR: val = (cmp <= 0); break;
2573 case GE_EXPR: val = (cmp >= 0); break;
2574 default: done = false;
2579 return boolean_true_node;
2581 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2584 /* Likewise if the second comparison is a != comparison. */
2585 else if (code2 == NE_EXPR)
   /* CMP still compares OP1B to OP2B, so the strict-order tests are
      reversed here to ask whether OP2B satisfies (x CODE1 OP1B).  */
2591 case EQ_EXPR: val = (cmp == 0); break;
2592 case NE_EXPR: val = (cmp != 0); break;
2593 case LT_EXPR: val = (cmp > 0); break;
2594 case GT_EXPR: val = (cmp < 0); break;
2595 case LE_EXPR: val = (cmp >= 0); break;
2596 case GE_EXPR: val = (cmp <= 0); break;
2597 default: done = false;
2602 return boolean_true_node;
2604 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2608 /* See if an equality test is redundant with the other comparison. */
2609 else if (code1 == EQ_EXPR)
   /* val := is (op1a == op1b) implied by (op1a code2 op2b)?  If so the
      equality arm is subsumed and the second comparison suffices.  */
2614 case EQ_EXPR: val = (cmp == 0); break;
2615 case NE_EXPR: val = (cmp != 0); break;
2616 case LT_EXPR: val = (cmp < 0); break;
2617 case GT_EXPR: val = (cmp > 0); break;
2618 case LE_EXPR: val = (cmp <= 0); break;
2619 case GE_EXPR: val = (cmp >= 0); break;
2624 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2626 else if (code2 == EQ_EXPR)
   /* Symmetric case: is (op1a == op2b) implied by (op1a code1 op1b)?  */
2631 case EQ_EXPR: val = (cmp == 0); break;
2632 case NE_EXPR: val = (cmp != 0); break;
2633 case LT_EXPR: val = (cmp > 0); break;
2634 case GT_EXPR: val = (cmp < 0); break;
2635 case LE_EXPR: val = (cmp >= 0); break;
2636 case GE_EXPR: val = (cmp <= 0); break;
2641 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2644 /* Choose the less restrictive of two < or <= comparisons. */
2645 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2646 && (code2 == LT_EXPR || code2 == LE_EXPR))
2648 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2649 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2651 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2654 /* Likewise choose the less restrictive of two > or >= comparisons. */
2655 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2656 && (code2 == GT_EXPR || code2 == GE_EXPR))
2658 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2659 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2661 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2664 /* Check for singleton ranges. */
   /* (x < c) OR (x > c) folds to (x != c).  NOTE(review): the cmp == 0
      guard for this arm is on an elided line (2665).  */
2666 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2667 || (code1 == GT_EXPR && code2 == LT_EXPR))
2668 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2670 /* Check for less/greater pairs that don't restrict the range at all. */
2672 && (code1 == LT_EXPR || code1 == LE_EXPR)
2673 && (code2 == GT_EXPR || code2 == GE_EXPR))
2674 return boolean_true_node;
2676 && (code1 == GT_EXPR || code1 == GE_EXPR)
2677 && (code2 == LT_EXPR || code2 == LE_EXPR))
2678 return boolean_true_node;
2681 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2682 NAME's definition is a truth value. See if there are any simplifications
2683 that can be done against the NAME's definition. */
2684 if (TREE_CODE (op1a) == SSA_NAME
2685 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2686 && (integer_zerop (op1b) || integer_onep (op1b)))
   /* INVERT is true when the first comparison tests that NAME is false.  */
2688 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2689 || (code1 == NE_EXPR && integer_onep (op1b)));
2690 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2691 switch (gimple_code (stmt))
   /* NOTE(review): the case labels of this switch (presumably
      GIMPLE_ASSIGN and GIMPLE_PHI) are on elided lines; only the case
      bodies are visible below.  */
2694 /* Try to simplify by copy-propagating the definition. */
2695 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2698 /* If every argument to the PHI produces the same result when
2699 ORed with the second comparison, we win.
2700 Do not do this unless the type is bool since we need a bool
2701 result here anyway. */
2702 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
   /* RESULT accumulates the common simplified value across PHI args;
      any argument that disagrees aborts the fold (on elided lines).  */
2704 tree result = NULL_TREE;
2706 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2708 tree arg = gimple_phi_arg_def (stmt, i);
2710 /* If this PHI has itself as an argument, ignore it.
2711 If all the other args produce the same result,
2713 if (arg == gimple_phi_result (stmt))
2715 else if (TREE_CODE (arg) == INTEGER_CST)
   /* A constant "true" arg (or "false" under INVERT) makes the OR true.  */
2717 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2720 result = boolean_true_node;
2721 else if (!integer_onep (result))
2725 result = fold_build2 (code2, boolean_type_node,
2727 else if (!same_bool_comparison_p (result,
2731 else if (TREE_CODE (arg) == SSA_NAME)
   /* Recurse on an SSA arg; every arg must yield the same result.  */
2733 tree temp = or_var_with_comparison (arg, invert,
2739 else if (!same_bool_result_p (result, temp))
2755 /* Try to simplify the OR of two comparisons, specified by
2756 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2757 If this can be simplified to a single expression (without requiring
2758 introducing more SSA variables to hold intermediate values),
2759 return the resulting tree. Otherwise return NULL_TREE.
2760 If the result expression is non-null, it has boolean type. */
2763 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2764 enum tree_code code2, tree op2a, tree op2b)
   /* or_comparisons_1 is deliberately asymmetric (it follows SSA defs of
      its first comparison only), so try both argument orders.
      NOTE(review): the "if (t) return t;" between the two calls sits on
      elided lines (2767-2769); only the fallback call is visible here.  */
2766 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2770 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);