/* Statement simplification on GIMPLE.
   Copyright (C) 2010 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

get_symbol_constant_value (tree sym)
      && (TREE_READONLY (sym)
          || TREE_CODE (sym) == CONST_DECL))
      tree val = DECL_INITIAL (sym);
      if (is_gimple_min_invariant (val))
        if (TREE_CODE (val) == ADDR_EXPR)
            tree base = get_base_address (TREE_OPERAND (val, 0));
            if (base && TREE_CODE (base) == VAR_DECL)
                TREE_ADDRESSABLE (base) = 1;
                if (gimple_referenced_vars (cfun))
                  add_referenced_var (base);

      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
          && !DECL_EXTERNAL (sym)
          && targetm.binds_local_p (sym)
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return fold_convert (TREE_TYPE (sym), integer_zero_node);
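/* Illustrative example: for a declaration such as

     static const int limit = 42;

   a load of `limit' folds to the constant 42, and a locally binding
   `const' without an initializer folds to zero per the rule above.  */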
/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */

may_propagate_address_into_dereference (tree addr, tree deref)
  gcc_assert (INDIRECT_REF_P (deref)
              && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
          || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
                                      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
          && useless_type_conversion_p (TREE_TYPE (deref),
                                        TREE_TYPE (TREE_OPERAND (addr, 0))));
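/* For instance, propagating the invariant address `&a' into the
   dereference `*p_1' cancels the pair and yields a direct access to
   `a', provided the types involved convert uselessly as tested above.  */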
/* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.

   LOC is the location of the original expression.  */

maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset,
                                bool allow_negative_idx)
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that ARRAY_REF.
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
          || TREE_CODE (elt_offset) != INTEGER_CST)

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = ssizetype;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (integer_zerop (offset))
      if (TREE_CODE (elt_size) != INTEGER_CST)
        elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);

      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;

      /* The final array offset should be signed, so we need
         to sign-extend the (possibly pointer) offset here
         and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
                                 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
          || div_and_round_double (TRUNC_DIV_EXPR, 0,
                                   soffset.low, soffset.high,
                                   TREE_INT_CST_LOW (elt_size),
                                   TREE_INT_CST_HIGH (elt_size),
                                   &lquo, &hquo, &lrem, &hrem)

      idx = build_int_cst_wide (idx_type, lquo, hquo);
  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
        min_idx = TYPE_MIN_VALUE (idx_type);

      min_idx = fold_convert (idx_type, min_idx);
      if (TREE_CODE (min_idx) != INTEGER_CST)

      elt_offset = fold_convert (idx_type, elt_offset);

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds.  For example
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
     which should not be folded to &a->d[-8].  */
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
          /* Accesses after the end of arrays of size 0 (gcc
             extension) and 1 are likely intentional ("struct
          && compare_tree_int (up_bound, 1) > 0)
      && TYPE_MIN_VALUE (domain_type))
      if (!allow_negative_idx
          && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
          && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
  else if (!allow_negative_idx
           && compare_tree_int (idx, 0) < 0)

  tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
  SET_EXPR_LOCATION (t, loc);
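/* Illustration (assuming a 4-byte `int'): with

     int a[8];

   the reference *(int *)((char *)&a + 8), i.e. BASE `a', OFFSET 8 and
   ORIG_TYPE `int', folds to the ARRAY_REF a[2].  */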
/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.

   LOC is the location of the original expression.  */

maybe_fold_offset_to_component_ref (location_t loc, tree record_type,
                                    tree base, tree offset, tree orig_type)
  tree f, t, field_type, tail_array_field, field_offset;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
      if (TREE_CODE (f) != FIELD_DECL)
      if (DECL_BIT_FIELD (f))

      if (!DECL_FIELD_OFFSET (f))
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)

      /* ??? Java creates "interesting" fields for representing base classes.
         They have no name, and have no context.  With no context, we get into
         trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
         then we can return that field.  */
          && useless_type_conversion_p (orig_type, field_type))
          t = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))

      /* Check for array at the end of the struct.  This is often
         used for flexible array members.  We should be able to
         turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
        tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
          || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))

      /* If we matched, then set offset to the displacement into
      new_base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
      SET_EXPR_LOCATION (new_base, loc);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (loc, new_base, t, orig_type,
                                            f == TYPE_FIELDS (record_type));
      ret = maybe_fold_offset_to_component_ref (loc, field_type, new_base, t,

  if (!tail_array_field)

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
  SET_EXPR_LOCATION (base, loc);

  t = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type,
                                      f == TYPE_FIELDS (record_type));
  return maybe_fold_offset_to_component_ref (loc, field_type, base, offset,
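/* Illustration (assuming a 4-byte `int' and no padding): with

     struct S { int x; int y; } s;

   BASE `s', OFFSET 4 and ORIG_TYPE `int' yield the COMPONENT_REF s.y;
   an offset that lands in a trailing array field instead recurses
   through the tail-array handling above.  */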
/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   LOC is the location of the original expression.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */

maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
  if (TREE_CODE (base) != ADDR_EXPR)

  base = TREE_OPERAND (base, 0);

  /* Handle the case where an existing COMPONENT_REF picks e.g. the wrong
     field of a union, so it needs to be removed and a new COMPONENT_REF
     constructed.  The wrong COMPONENT_REF is often constructed by folding
     the (type *)&object within the expression (type *)&object+offset.  */
  if (handled_component_p (base))
      HOST_WIDE_INT sub_offset, size, maxsize;

      newbase = get_ref_base_and_extent (base, &sub_offset,
      gcc_assert (newbase);
          && !(sub_offset & (BITS_PER_UNIT - 1)))
          offset = int_const_binop (PLUS_EXPR, offset,
                                    build_int_cst (TREE_TYPE (offset),
                                                   sub_offset / BITS_PER_UNIT), 1);

  if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
      && integer_zerop (offset))

  type = TREE_TYPE (base);

  ret = maybe_fold_offset_to_component_ref (loc, type, base, offset, orig_type);
  ret = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type, true);
/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
   or &BASE[index] or by combination of those.

   LOC is the location of the original expression.

   Before attempting the conversion strip off existing component refs.  */

maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
  gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
              && POINTER_TYPE_P (orig_type));

  t = maybe_fold_offset_to_reference (loc, addr, offset,
                                      TREE_TYPE (orig_type));

      /* For __builtin_object_size to function correctly we need to
         make sure not to fold address arithmetic so that we change
         reference from one array to another.  This would happen for
         example for

           struct X { char s1[10]; char s2[10] } s;
           char *foo (void) { return &s.s2[-4]; }

         where we need to avoid generating &s.s1[6].  As the C and
         C++ frontends create different initial trees
         (char *) &s.s1 + -4  vs.  &s.s1[-4]  we have to do some
         sophisticated comparisons here.  Note that checking for the
         condition after the fact is easier than trying to avoid doing
      if (TREE_CODE (orig) == ADDR_EXPR)
        orig = TREE_OPERAND (orig, 0);
      if ((TREE_CODE (orig) == ARRAY_REF
           || (TREE_CODE (orig) == COMPONENT_REF
               && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
          && (TREE_CODE (t) == ARRAY_REF
              || TREE_CODE (t) == COMPONENT_REF)
          && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
                               ? TREE_OPERAND (orig, 0) : orig,
                               TREE_CODE (t) == ARRAY_REF
                               ? TREE_OPERAND (t, 0) : t, 0))

      ptr_type = build_pointer_type (TREE_TYPE (t));
      if (!useless_type_conversion_p (orig_type, ptr_type))
      return build_fold_addr_expr_with_type_loc (loc, t, ptr_type);
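/* For example, with `int a[8]' an address computation
   (int *) ((char *) &a + 4) can be rewritten here as &a[1] (assuming
   a 4-byte `int'), while the cross-array s1/s2 case shown above is
   deliberately rejected to keep __builtin_object_size working.  */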
/* A subroutine of fold_stmt.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
  bool volatile_p = TREE_THIS_VOLATILE (expr);
  location_t loc = EXPR_LOCATION (expr);

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);

  /* Add in any offset from a POINTER_PLUS_EXPR.  */
  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
      base = TREE_OPERAND (base, 0);
      offset = fold_convert (sizetype,
                             int_const_binop (PLUS_EXPR, offset, offset2, 1));

  if (TREE_CODE (base) == ADDR_EXPR)
      tree base_addr = base;

      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
          && is_gimple_min_invariant (DECL_INITIAL (base)))
        return DECL_INITIAL (base);

      /* If there is no offset involved simply return the folded base.  */
      if (integer_zerop (offset))

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_reference (loc, base_addr, offset,
          /* Preserve volatileness of the original expression.
             We can end up with a plain decl here which is shared
             and we shouldn't mess with its flags.  */
            TREE_THIS_VOLATILE (t) = volatile_p;

      /* We can get here for out-of-range string constant accesses,
         such as "_"[3].  Bail out of the entire substitution search
         and arrange for the entire statement to be replaced by a
         call to __builtin_trap.  In all likelihood this will all be
         constant-folded away, but in the meantime we can't leave with
         something that get_expr_operands can't understand.  */
      if (TREE_CODE (t) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
          /* FIXME: Except that this causes problems elsewhere with dead
             code not being deleted, and we die in the rtl expanders
             because we failed to remove some ssa_name.  In the meantime,
          /* FIXME2: This condition should be signaled by
             fold_read_from_constant_string directly, rather than
             re-checking for it here.  */
          return integer_zero_node;

  /* Try folding *(B+O) to B->X.  Still an improvement.  */
  if (POINTER_TYPE_P (TREE_TYPE (base)))
      t = maybe_fold_offset_to_reference (loc, base, offset,

  /* Otherwise we had an offset that we could not simplify.  */
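/* Example: a dereference of an address into a string constant, say
   *(&"hello"[0] + 1), reduces to the character constant 'e' through
   fold_read_from_constant_string, while an out-of-range access such
   as "_"[3] is turned into integer_zero_node as described above.  */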
/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
        (T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
   which may be able to propagate further.  */

maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
  op0 = TREE_OPERAND (op0, 0);

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
      /* Or op0 should now be A[0] and the non-constant offset defined
         via a multiplication by the array element size.  */
      if (TREE_CODE (op0) == ARRAY_REF
          && integer_zerop (TREE_OPERAND (op0, 1))
          && TREE_CODE (op1) == SSA_NAME
          && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1))
          gimple offset_def = SSA_NAME_DEF_STMT (op1);
          if (!is_gimple_assign (offset_def))

          /* As we will end up creating a variable index array access
             in the outermost array dimension make sure there isn't
             a more inner array that the index could overflow to.  */
          if (TREE_CODE (TREE_OPERAND (op0, 0)) == ARRAY_REF)

          /* Do not build array references of something that we can't
             see the true number of array dimensions for.  */
          if (!DECL_P (TREE_OPERAND (op0, 0))
              && !handled_component_p (TREE_OPERAND (op0, 0)))

          if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
              && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
              && tree_int_cst_equal (gimple_assign_rhs2 (offset_def),
                                     TYPE_SIZE_UNIT (TREE_TYPE (op0))))
            return build_fold_addr_expr
                     (build4 (ARRAY_REF, TREE_TYPE (op0),
                              TREE_OPERAND (op0, 0),
                              gimple_assign_rhs1 (offset_def),
                              TREE_OPERAND (op0, 2),
                              TREE_OPERAND (op0, 3)));
          else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0)))
                   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
            return build_fold_addr_expr
                     (build4 (ARRAY_REF, TREE_TYPE (op0),
                              TREE_OPERAND (op0, 0),
                              TREE_OPERAND (op0, 2),
                              TREE_OPERAND (op0, 3)));
  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);

      if (TREE_CODE (array_idx) != INTEGER_CST)
      if (TREE_CODE (elt_size) != INTEGER_CST)

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
          min_idx = TYPE_MIN_VALUE (min_idx);
              if (TREE_CODE (min_idx) != INTEGER_CST)

              array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
              if (!integer_zerop (min_idx))
                array_idx = int_const_binop (MINUS_EXPR, array_idx,

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (loc, op0, op1, ptd_type, true);
    t = maybe_fold_offset_to_component_ref (loc, TREE_TYPE (op0), op0, op1,
      t = build1 (ADDR_EXPR, res_type, t);
      SET_EXPR_LOCATION (t, loc);
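/* Illustration: with `int a[10]', the POINTER_PLUS_EXPR

     (int *) (&a p+ 4)

   (constant byte offset 4, assuming a 4-byte `int') is rewritten as
   &a[1], which later substitutions may be able to propagate further.  */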
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

maybe_fold_reference (tree expr, bool is_lhs)
  if (TREE_CODE (expr) == ARRAY_REF
      tree tem = fold_read_from_constant_string (expr);

  /* ??? We might want to open-code the relevant remaining cases
     to avoid using the generic fold.  */
  if (handled_component_p (*t)
      && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
      tree tem = fold (*t);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  if (TREE_CODE (*t) == INDIRECT_REF)
      tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0),
      /* Avoid folding *"abc" = 5 into 'a' = 5.  */
      if (is_lhs && tem && CONSTANT_CLASS_P (tem))
               && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR)
        /* If we had a good reason for propagating the address here,
           make sure we end up with valid gimple.  See PR34989.  */
        tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);

          tem = maybe_fold_reference (expr, is_lhs);

      tree tem = get_symbol_constant_value (*t);
          && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
          *t = unshare_expr (tem);
          tem = maybe_fold_reference (expr, is_lhs);
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

fold_gimple_assign (gimple_stmt_iterator *si)
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    case GIMPLE_SINGLE_RHS:
        tree rhs = gimple_assign_rhs1 (stmt);

        /* Try to fold a conditional expression.  */
        if (TREE_CODE (rhs) == COND_EXPR)
            tree op0 = COND_EXPR_COND (rhs);
            location_t cond_loc = EXPR_LOCATION (rhs);

            if (COMPARISON_CLASS_P (op0))
                fold_defer_overflow_warnings ();
                tem = fold_binary_loc (cond_loc,
                                       TREE_CODE (op0), TREE_TYPE (op0),
                                       TREE_OPERAND (op0, 0),
                                       TREE_OPERAND (op0, 1));
                /* This is actually a conditional expression, not a GIMPLE
                   conditional statement, however, the valid_gimple_rhs_p
                   test still applies.  */
                set = (tem && is_gimple_condexpr (tem)
                       && valid_gimple_rhs_p (tem));
                fold_undefer_overflow_warnings (set, stmt, 0);
            else if (is_gimple_min_invariant (op0))

              result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
                                        COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));

        else if (TREE_CODE (rhs) == TARGET_MEM_REF)
          return maybe_fold_tmr (rhs);

        else if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == ADDR_EXPR)
            tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true);
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                 && (CONSTRUCTOR_NELTS (rhs)
                     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (TREE_CODE (val) != INTEGER_CST
                  && TREE_CODE (val) != REAL_CST
                  && TREE_CODE (val) != FIXED_CST)

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));

        else if (DECL_P (rhs))
          return unshare_expr (get_symbol_constant_value (rhs));
        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))

    case GIMPLE_UNARY_RHS:
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
        else if (CONVERT_EXPR_CODE_P (subcode)
                 && POINTER_TYPE_P (gimple_expr_type (stmt))
                 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
            tree type = gimple_expr_type (stmt);
            tree t = maybe_fold_offset_to_address (loc,
                                                   gimple_assign_rhs1 (stmt),
                                                   integer_zero_node, type);

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
              type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
              if (!useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
                type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          result = maybe_fold_stmt_addition (gimple_location (stmt),
                                             gimple_assign_rhs1 (stmt),
                                             gimple_assign_rhs2 (stmt));

        result = fold_binary_loc (loc, subcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));

          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))

          /* Fold might have produced non-GIMPLE, so if we trust it blindly
             we lose canonicalization opportunities.  Do not go again
             through fold here though, or the same non-GIMPLE will be
          if (commutative_tree_code (subcode)
              && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                       gimple_assign_rhs2 (stmt), false))
            return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
                           gimple_assign_rhs2 (stmt),
                           gimple_assign_rhs1 (stmt));
    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
                                 TREE_TYPE (gimple_assign_lhs (stmt)),
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 gimple_assign_rhs3 (stmt));

          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))

          /* Fold might have produced non-GIMPLE, so if we trust it blindly
             we lose canonicalization opportunities.  Do not go again
             through fold here though, or the same non-GIMPLE will be
          if (commutative_ternary_tree_code (subcode)
              && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                       gimple_assign_rhs2 (stmt), false))
            return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
                           gimple_assign_rhs2 (stmt),
                           gimple_assign_rhs1 (stmt),
                           gimple_assign_rhs3 (stmt));

    case GIMPLE_INVALID_RHS:
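/* For example, a binary RHS such as `1 + x_2' that fold leaves alone
   is still canonicalized by the operand swap above into `x_2 + 1'.  */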
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

fold_gimple_cond (gimple stmt)
  tree result = fold_binary_loc (gimple_location (stmt),
                                 gimple_cond_code (stmt),
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));

      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
          gimple_cond_set_condition_from_tree (stmt, result);
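/* Example: a condition such as `if (4 > 3)' folds to
   boolean_true_node and the GIMPLE_COND is rewritten in place to the
   equivalent always-true test; the CFG itself is left untouched.  */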
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;
  gimple laststore = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);
  reaching_vuse = gimple_vuse (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
        gsi_insert_before (si_p, last, GSI_NEW_STMT);
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      mark_symbols_for_renaming (new_stmt);
      /* If the new statement has a VUSE, update it with the exact SSA name
         we know will reach this one.  */
      if (gimple_vuse (new_stmt))
          /* If we've also seen a previous store create a new VDEF for
             the latter one, and make that the new reaching VUSE.  */
              reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
              gimple_set_vdef (laststore, reaching_vuse);
              update_stmt (laststore);
          gimple_set_vuse (new_stmt, reaching_vuse);
          gimple_set_modified (new_stmt, true);
      if (gimple_assign_single_p (new_stmt)
          && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          laststore = new_stmt;
  if (lhs == NULL_TREE)
      /* If we replace a call without LHS that has a VDEF and our new
         sequence ends with a store we must make that store have the same
         vdef in order not to break the sequencing.  This can happen
         for instance when folding memcpy calls into assignments.  */
      if (gimple_vdef (stmt) && laststore)
          gimple_set_vdef (laststore, gimple_vdef (stmt));
          move_ssa_defining_stmt_for_defs (laststore, stmt);
          update_stmt (laststore);
          unlink_stmt_vdef (stmt);
          release_defs (stmt);

        gsi_insert_before (si_p, last, GSI_NEW_STMT);

          reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
          gimple_set_vdef (laststore, reaching_vuse);
          update_stmt (laststore);

      new_stmt = gimple_build_assign (lhs, tmp);
      gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
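/* Typical use: after a call like `memcpy (dst, src, n)' has been
   folded to an assignment, this routine gimplifies the replacement
   expression, threads the VUSE/VDEF chain through any new stores and
   finally substitutes the original GIMPLE_CALL as described above.  */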
/* Return the string length, maximum string length or maximum value of
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
  if (TREE_CODE (arg) != SSA_NAME)
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
               && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);

          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
        val = c_strlen (arg, 1);

              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)

              if (tree_int_cst_lt (*length, val))
          else if (simple_cst_equal (val, *length) != 1)

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
            tree rhs = gimple_assign_rhs1 (def_stmt);
            return get_maxval_strlen (rhs, length, visited, type);

        /* All the arguments of the PHI node must have the same constant
        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))

            if (!get_maxval_strlen (arg, length, visited, type))
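/* Example: for a PHI node whose arguments are the addresses of the
   string constants "ab" and "cdef", TYPE == 1 records the maximum
   string length 4 in *LENGTH, whereas TYPE == 0 fails because the two
   lengths differ.  */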
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

gimple_fold_builtin (gimple stmt)
  tree result, val[3];
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result.  */
  result = fold_call_stmt (stmt, ignore);
        STRIP_NOPS (result);

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)

  /* If the builtin could not be folded, and it has no argument list,
  nargs = gimple_call_num_args (stmt);

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:

  if (arg_idx >= nargs)

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  switch (DECL_FUNCTION_CODE (callee))
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
            fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we can not use the result.  */
          if (is_gimple_val (new_val)
              || (is_gimple_cast (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (loc, callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (loc, callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),

    case BUILT_IN_FPUTS:
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, false, val[0]);

    case BUILT_IN_FPUTS_UNLOCKED:
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, true, val[0]);

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          DECL_FUNCTION_CODE (callee));

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          DECL_FUNCTION_CODE (callee));

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));

  if (result && ignore)
    result = fold_ignored_result (result);
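/* Example: a call `strlen (s)' where the CCP-derived string length of
   `s' is the constant 3 folds to that constant, and `strcpy (d, s)'
   with a known length for `s' may be rewritten in terms of memcpy, as
   noted in the comment before this function.  */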
/* Return the first of the base binfos of BINFO that has virtual functions.  */

get_first_base_binfo_with_virtuals (tree binfo)
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    if (BINFO_VIRTUALS (base_binfo))

/* Search for a base binfo of BINFO that corresponds to TYPE and return it if
   it is found or NULL_TREE if it is not.  */

get_base_binfo_for_type (tree binfo, tree type)
  tree res = NULL_TREE;

  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    if (TREE_TYPE (base_binfo) == type)

/* Return a binfo describing the part of the object referenced by expression
   REF.  Return NULL_TREE if it cannot be determined.  REF can consist of a
   series of COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can
   also be just a simple declaration, indirect reference or an SSA_NAME.  If
   the function discovers an INDIRECT_REF or an SSA_NAME, it will assume that
   the encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
   Otherwise the first non-artificial field declaration or the base declaration
   will be examined to get the encapsulating type.  */

gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
      if (TREE_CODE (ref) == COMPONENT_REF)
          tree binfo, base_binfo;
          tree field = TREE_OPERAND (ref, 1);

          if (!DECL_ARTIFICIAL (field))
              tree type = TREE_TYPE (field);
              if (TREE_CODE (type) == RECORD_TYPE)
                return TYPE_BINFO (type);

          par_type = TREE_TYPE (TREE_OPERAND (ref, 0));
          binfo = TYPE_BINFO (par_type);
              || BINFO_N_BASE_BINFOS (binfo) == 0)

          base_binfo = get_first_base_binfo_with_virtuals (binfo);
          if (base_binfo && BINFO_TYPE (base_binfo) != TREE_TYPE (field))
              d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0),
              /* Get descendant binfo.  */
              return get_base_binfo_for_type (d_binfo, TREE_TYPE (field));

          ref = TREE_OPERAND (ref, 0);
      else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE)
        return TYPE_BINFO (TREE_TYPE (ref));
      else if (known_binfo
               && (TREE_CODE (ref) == SSA_NAME
                   || TREE_CODE (ref) == INDIRECT_REF))

/* Fold an OBJ_TYPE_REF expression to the address of a function.  TOKEN is
   the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  */

gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo)
  v = BINFO_VIRTUALS (known_binfo);
      i += (TARGET_VTABLE_USES_DESCRIPTORS
            ? TARGET_VTABLE_USES_DESCRIPTORS : 1);

  fndecl = TREE_VALUE (v);
  return build_fold_addr_expr (fndecl);
/* Fold an OBJ_TYPE_REF expression to the address of a function.  If
   KNOWN_TYPE is not NULL_TREE, it is the true type of the outermost
   encapsulating object if that comes from a pointer SSA_NAME.  If the true
   outermost encapsulating type can be determined from a declaration
   OBJ_TYPE_REF_OBJECT(REF), it is used regardless of KNOWN_TYPE (which thus
   can be NULL_TREE).  */

gimple_fold_obj_type_ref (tree ref, tree known_type)
  tree obj = OBJ_TYPE_REF_OBJECT (ref);
  tree known_binfo = known_type ? TYPE_BINFO (known_type) : NULL_TREE;

  if (TREE_CODE (obj) == ADDR_EXPR)
    obj = TREE_OPERAND (obj, 0);

  binfo = gimple_get_relevant_ref_binfo (obj, known_binfo);
      HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
      return gimple_fold_obj_type_ref_known_binfo (token, binfo);
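/* Devirtualization example: an indirect call through an OBJ_TYPE_REF
   whose OBJ_TYPE_REF_OBJECT is `&b' for a declaration `struct B b'
   resolves here to the address of the virtual method found at slot
   OBJ_TYPE_REF_TOKEN of BINFO_VIRTUALS in b's binfo, turning the
   indirect call into a direct one.  */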
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

fold_gimple_call (gimple_stmt_iterator *gsi)
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
      tree result = gimple_fold_builtin (stmt);
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);

      /* ??? Should perhaps do this in fold proper.  However, doing it
         there requires that we create a new CALL_EXPR, and that requires
         copying EH region info to the new node.  Easier to just do it
         here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
          && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR)
          t = gimple_fold_obj_type_ref (callee, NULL_TREE);
              gimple_call_set_fn (stmt, t);
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
        unsigned old_num_ops = gimple_num_ops (stmt);
        tree new_rhs = fold_gimple_assign (gsi);
        tree lhs = gimple_assign_lhs (stmt);
            && !useless_type_conversion_p (TREE_TYPE (lhs),
                                           TREE_TYPE (new_rhs)))
          new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
                || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);

      changed |= fold_gimple_cond (stmt);

      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
        if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
            tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
                gimple_call_set_arg (stmt, i, tmp);
      /* The entire statement may be replaced in this case.  */
        changed |= fold_gimple_call (gsi);

      /* Fold *& in asm operands.  */
        for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
            tree link = gimple_asm_output_op (stmt, i);
            tree op = TREE_VALUE (link);
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, true)) != NULL_TREE)
                TREE_VALUE (link) = op;
        for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
            tree link = gimple_asm_input_op (stmt, i);
            tree op = TREE_VALUE (link);
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, false)) != NULL_TREE)
                TREE_VALUE (link) = op;

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
          tree new_lhs = maybe_fold_reference (lhs, true);
              gimple_set_lhs (stmt, new_lhs);
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding

   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

fold_stmt (gimple_stmt_iterator *gsi)
  return fold_stmt_1 (gsi, false);

/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.

   The statement STMT should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

fold_stmt_inplace (gimple stmt)
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  bool changed = fold_stmt_1 (&gsi, true);
  gcc_assert (gsi_stmt (gsi) == stmt);
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

canonicalize_bool (tree expr, bool invert)
      if (integer_nonzerop (expr))
        return boolean_false_node;
      else if (integer_zerop (expr))
        return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (EQ_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));

      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
      if (integer_nonzerop (expr))
        return boolean_true_node;
      else if (integer_zerop (expr))
        return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (NE_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (TREE_CODE (expr),
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
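/* Examples: for an integer comparison `x_1 < y_2' with INVERT true
   the result is `x_1 >= y_2'; a plain boolean SSA name becomes the
   comparison `expr != 0' (or `expr == 0' when inverting).  */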
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving

same_bool_comparison_p (const_tree expr, enum tree_code code,
                        const_tree op1, const_tree op2)
  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
      if (operand_equal_p (expr, op1, 0))
        return ((code == NE_EXPR && integer_zerop (op2))
                || (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
          && gimple_assign_rhs_code (s) == code
          && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
          && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
          enum tree_code c = gimple_assign_rhs_code (s);
          if ((c == NE_EXPR && integer_zerop (op2))
              || (c == EQ_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr, c,
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
          if ((c == EQ_EXPR && integer_zerop (op2))
              || (c == NE_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr,
                                           invert_tree_comparison (c, false),
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
/* Check to see if two boolean expressions OP1 and OP2 are logically

same_bool_result_p (const_tree op1, const_tree op2)
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
                                 TREE_OPERAND (op2, 0),
                                 TREE_OPERAND (op2, 1)))
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
                                 TREE_OPERAND (op1, 0),
                                 TREE_OPERAND (op1, 1)))
/* Forward declarations for some mutually recursive functions.  */

and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b);
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b);
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b);
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b);
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b);
or_var_with_comparison_1 (gimple stmt,
                          enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b)
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
    t = or_var_with_comparison_1 (stmt,
                                  invert_tree_comparison (code2, false),
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b)
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
          true_test_var = op2a;
          if (var == true_test_var)
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_false_node;

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
      tree t = and_comparisons_1 (innercode,
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt),

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (innercode == TRUTH_AND_EXPR
      || innercode == TRUTH_OR_EXPR
      || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
          && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      tree partial = NULL_TREE;
      bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
         inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
         inner1 AND (inner1 OR inner2) => inner1
         !inner1 AND (inner1 AND inner2) => false
         !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
                ? boolean_false_node
                : and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
                ? boolean_false_node
                : and_var_with_comparison (inner1, false, code2, op2a, op2b));
      /* Next, redistribute/reassociate the AND across the inner tests.
         Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
              if (integer_onep (t))
              else if (integer_zerop (t))
                return boolean_false_node;

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner2 AND (op2a code2 op2b)))  */
              if (integer_onep (t))
                return boolean_true_node;
              /* Save partial result for later.  */

      /* Compute the second partial result, (inner2 AND (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (inner1 AND t)  */
              if (integer_onep (t))
              else if (integer_zerop (t))
                return boolean_false_node;

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner1 AND (op2a code2 op2b)))
             => (t OR partial)  */
              if (integer_onep (t))
                return boolean_true_node;
                  /* We already got a simplification for the other
                     operand to the redistributed OR expression.  The
                     interesting case is when at least one is false.
                     Or, if both are the same, we can apply the identity
                  if (integer_zerop (partial))
                  else if (integer_zerop (t))
                  else if (same_bool_result_p (t, partial))
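/* Worked example: given

     a_1 = b_2 < 7;
     x_3 = a_1 & c_4;

   a query for `x_3 AND (b_2 >= 7)' finds the inner comparison
   `b_2 < 7', whose conjunction with `b_2 >= 7' is false, so the whole
   expression simplifies to boolean_false_node.  */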

/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }
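
  /* For example, when both tests compare the same operands,
     combine_comparisons can collapse (x <= y) AND (x >= y) into the single
     comparison (x == y), or (x < y) AND (x > y) into constant false
     (for operands where NaN handling does not interfere).  */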

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default: val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default: val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     return that.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME)
		    {
		      tree temp = and_var_with_comparison (arg, invert,
							   code2, op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
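
/* A few concrete folds produced by and_comparisons_1:
     (x <= 3) AND (x >= 3)  =>  x == 3     (singleton range)
     (x < 2)  AND (x > 5)   =>  false      (disjoint ranges)
     (x > 3)  AND (x > 5)   =>  x > 5      (keep the more restrictive bound)
     (x == 4) AND (x > 7)   =>  false      (the constant fails the other test)  */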

/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
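
/* Because and_comparisons_1 only follows the SSA definition feeding its
   first comparison, the wrapper above retries with the two comparisons
   swapped, so a simplification that depends on the definition of OP2A is
   still found.  The helper below is purely an illustrative sketch (it is
   not used elsewhere in this file) of how a client pass might merge the
   guards of two GIMPLE_COND statements that protect the same code:

     static tree
     maybe_fold_guard_pair (gimple outer_cond, gimple inner_cond)
     {
       return maybe_fold_and_comparisons (gimple_cond_code (outer_cond),
					  gimple_cond_lhs (outer_cond),
					  gimple_cond_rhs (outer_cond),
					  gimple_cond_code (inner_cond),
					  gimple_cond_lhs (inner_cond),
					  gimple_cond_rhs (inner_cond));
     }

   A NULL_TREE result tells such a caller that no single-expression fold
   exists and the two conditions must be kept separate.  */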

/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
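
/* For example, the rewrite above means that !a OR (x > 3) is handled by
   first simplifying a AND (x <= 3) (the inverted comparison) and then
   letting canonicalize_bool invert whatever simplification comes back.  */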

/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (innercode == TRUTH_AND_EXPR
      || innercode == TRUTH_OR_EXPR
      || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
	  && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));
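
      /* E.g. with VAR = (a OR b) and a second test (a == 0), false_test_var
	 is a, which is inner1, so (a OR b) OR (a == 0) is true for every
	 value of a and the chain above returns boolean_true_node.  */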

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
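
/* An example of the reassociation performed by or_var_with_comparison_1:
   if VAR is (a OR b) with a defined as (x < 3), then ORing VAR with
   (x >= 3) computes the first partial result (x < 3) OR (x >= 3), which
   folds to true, so the whole expression folds to boolean_true_node.  */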

/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     return that.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME)
		    {
		      tree temp = or_var_with_comparison (arg, invert,
							  code2, op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
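
/* A few concrete folds produced by or_comparisons_1:
     (x < 3)  OR (x > 3)   =>  x != 3     (everything except the singleton)
     (x <= 5) OR (x >= 2)  =>  true       (the two ranges cover every value)
     (x < 3)  OR (x < 7)   =>  x < 7      (keep the less restrictive bound)
     (x == 4) OR (x < 7)   =>  x < 7      (the equality test is redundant)  */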

/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);