/* Statement simplification on GIMPLE.
   Copyright (C) 2010, 2011 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"
/* Return true when DECL can be referenced from current unit.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;

  if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
    return true;
  /* External flag is set, so we deal with C++ reference
     to static object from other file.  */
  if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
      && TREE_CODE (decl) == VAR_DECL)
    {
      /* Just be sure it is not a bug in the frontend setting
	 flags incorrectly.  Those variables should never
	 be finalized.  */
      gcc_checking_assert (!(vnode = varpool_get_node (decl))
			   || !vnode->finalized);
      return false;
    }
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     output later.

     ??? As observed in PR20991, for already optimized out comdat virtual
     functions we may not necessarily give up, because the copy will be
     output elsewhere when the corresponding vtable is output.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;
  /* If we already output the function body, we are safe.  */
  if (TREE_ASM_WRITTEN (decl))
    return true;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have the function body and that we didn't take
	 the decision to eliminate the offline copy of the function yet.
	 The second is important when devirtualization happens during the
	 final compilation stage, when making a new reference no longer
	 makes the callee to be compiled.  */
      if (!node || !node->analyzed || node->global.inlined_to)
	return false;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->finalized)
	return false;
    }
  return true;
}
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.  */
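/* For illustration (a sketch, not taken from any particular testcase):
   an initializer such as

     static int a[4];
     static int *p = &a[0] + 1;    (DECL_INITIAL: &a p+ 4 on 32-bit int)

   may have its POINTER_PLUS_EXPR rewritten to the plain address &a[1],
   which is_gimple_min_invariant accepts; an ADDR_EXPR whose base symbol
   cannot be referenced from this unit makes the value non-constant and
   NULL_TREE is returned instead.  */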
canonicalize_constructor_val (tree cval)
{
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
    {
      tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
					     TREE_OPERAND (cval, 0),
					     TREE_OPERAND (cval, 1),
					     TREE_TYPE (cval));
      if (t)
	cval = t;
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (cval, 0));

      if (base
	  && (TREE_CODE (base) == VAR_DECL
	      || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base))
	return NULL_TREE;
      if (cfun && base && TREE_CODE (base) == VAR_DECL)
	add_referenced_var (base);
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
    }
  return cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */
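/* A minimal sketch of the intended behavior: given

     static const int x = 42;

   and assuming const_value_known_p (x) holds, this returns the
   INTEGER_CST 42 so loads from X can be replaced by the constant;
   for "static const int y;" it returns zero, and NULL_TREE whenever
   the value may be overridden at link or run time.  */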
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  val = canonicalize_constructor_val (val);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */
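/* E.g. (a sketch): for the GIMPLE pair

     p_1 = &x;
     ... = *p_1;

   this predicate decides whether substituting &x for p_1 lets the
   MEM_REF cancel to plain "x"; a volatile dereference of a
   non-invariant address, or an ADDR_EXPR of an incomplete type,
   blocks the propagation.  */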
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (TREE_CODE (deref) == MEM_REF
	      && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
	  || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
				      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
				     TREE_TYPE (addr))
	  && useless_type_conversion_p (TREE_TYPE (deref),
					TREE_TYPE (TREE_OPERAND (addr, 0))));
}
/* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.

   LOC is the location of the original expression.  */
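/* For example (a sketch): with "int a[10];" and 4-byte int, the
   dereference *(&a + 8) has BASE the array A and OFFSET 8; the offset
   divides exactly by the element size, so the result is a[2].  With
   OFFSET 6 the division is inexact and NULL_TREE is returned.  */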
maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that ARRAY_REF.
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = ssizetype;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL_TREE;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds.  For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.
     This is only an issue for multi-dimensional arrays.  */
  if (TREE_CODE (elt_type) == ARRAY_TYPE
      && domain_type)
    {
      if (TYPE_MAX_VALUE (domain_type)
	  && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
	return NULL_TREE;
      else if (TYPE_MIN_VALUE (domain_type)
	       && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	       && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
      else if (compare_tree_int (idx, 0) < 0)
	return NULL_TREE;
    }

  {
    tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
    SET_EXPR_LOCATION (t, loc);
    return t;
  }
}
/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
   LOC is the location of the original expression.

   Before attempting the conversion, strip off existing ADDR_EXPRs.  */

maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
				tree orig_type)
{
  tree ret;

  STRIP_NOPS (base);
  if (TREE_CODE (base) != ADDR_EXPR)
    return NULL_TREE;

  base = TREE_OPERAND (base, 0);
  if (types_compatible_p (orig_type, TREE_TYPE (base))
      && integer_zerop (offset))
    return base;

  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
  if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
    return ret;
  return NULL_TREE;
}
/* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
   LOC is the location of the original expression.  */

maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
			      tree orig_type)
{
  tree base, ret;

  STRIP_NOPS (addr);
  if (TREE_CODE (addr) != ADDR_EXPR)
    return NULL_TREE;
  base = TREE_OPERAND (addr, 0);
  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
  if (ret)
    {
      ret = build_fold_addr_expr (ret);
      if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
	return NULL_TREE;
      SET_EXPR_LOCATION (ret, loc);
    }

  return ret;
}
/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */
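/* Concretely (an illustrative sketch): for "int a[10];" an assignment

     ptr_1 = &a p+ 8;          (result type int *)

   has OP0 = a, OP1 = 8 and RES_TYPE = int *, and is rewritten to
   ptr_1 = &a[2]; which later folding can propagate into dereferences
   of ptr_1.  */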
maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    {
      /* Or op0 should now be A[0] and the non-constant offset defined
	 via a multiplication by the array element size.  */
      if (TREE_CODE (op0) == ARRAY_REF
	  /* As we will end up creating a variable index array access
	     in the outermost array dimension make sure there isn't
	     a more inner array that the index could overflow to.  */
	  && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
	  && integer_zerop (TREE_OPERAND (op0, 1))
	  && TREE_CODE (op1) == SSA_NAME)
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
	  if (!host_integerp (elsz, 1)
	      || !is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (TREE_OPERAND (op0, 0))
	      && !handled_component_p (TREE_OPERAND (op0, 0)))
	    return NULL_TREE;

	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
	    return build_fold_addr_expr
		     (build4 (ARRAY_REF, TREE_TYPE (op0),
			      TREE_OPERAND (op0, 0),
			      gimple_assign_rhs1 (offset_def),
			      TREE_OPERAND (op0, 2),
			      TREE_OPERAND (op0, 3)));
	  else if (integer_onep (elsz)
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
		     (build4 (ARRAY_REF, TREE_TYPE (op0),
			      TREE_OPERAND (op0, 0),
			      op1,
			      TREE_OPERAND (op0, 2),
			      TREE_OPERAND (op0, 3)));
	}
      else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
	       /* Ditto.  */
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
	       && TREE_CODE (op1) == SSA_NAME)
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
	  if (!host_integerp (elsz, 1)
	      || !is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (op0)
	      && !handled_component_p (op0))
	    return NULL_TREE;

	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
	    return build_fold_addr_expr
		     (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
			      op0, gimple_assign_rhs1 (offset_def),
			      integer_zero_node, NULL_TREE));
	  else if (integer_onep (elsz)
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
		     (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
			      op0, op1,
			      integer_zero_node, NULL_TREE));
	}

      return NULL_TREE;
    }

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (loc, op0, op1);
  if (t)
    {
      t = build_fold_addr_expr (t);
      if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
	return NULL_TREE;
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */
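/* Typical inputs (a sketch): after constant propagation we may see
   MEM[(int *)&x] as an operand; when volatility and types agree it is
   folded back to the plain reference tree "x", and a load whose base
   is a known-constant aggregate is replaced by its constant value via
   fold_const_aggregate_ref.  */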
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Try to fold a conditional expression.  */
	if (TREE_CODE (rhs) == COND_EXPR)
	  {
	    tree op0 = COND_EXPR_COND (rhs);
	    tree tem;
	    bool set = false;
	    location_t cond_loc = EXPR_LOCATION (rhs);

	    if (COMPARISON_CLASS_P (op0))
	      {
		fold_defer_overflow_warnings ();
		tem = fold_binary_loc (cond_loc,
				       TREE_CODE (op0), TREE_TYPE (op0),
				       TREE_OPERAND (op0, 0),
				       TREE_OPERAND (op0, 1));
		/* This is actually a conditional expression, not a GIMPLE
		   conditional statement, however, the valid_gimple_rhs_p
		   test still applies.  */
		set = (tem && is_gimple_condexpr (tem)
		       && valid_gimple_rhs_p (tem));
		fold_undefer_overflow_warnings (set, stmt, 0);
	      }
	    else if (is_gimple_min_invariant (op0))
	      {
		tem = op0;
		set = true;
	      }
	    else
	      return NULL_TREE;

	    if (set)
	      result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
					COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
	  }

	else if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return unshare_expr (get_symbol_constant_value (rhs));

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
	else if (CONVERT_EXPR_CODE_P (subcode)
		 && POINTER_TYPE_P (gimple_expr_type (stmt))
		 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
	  {
	    tree type = gimple_expr_type (stmt);
	    tree t = maybe_fold_offset_to_address (loc,
						   gimple_assign_rhs1 (stmt),
						   integer_zero_node, type);
	    if (t)
	      return t;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	{
	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
	    {
	      type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
	      if (!useless_type_conversion_p
		    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
		type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	    }
	  result = maybe_fold_stmt_addition (gimple_location (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     gimple_assign_rhs2 (stmt));
	}
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      else if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	       || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that the second operand is an integer constant with value
	     zero or one.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X,
		 X == 1 and X != 0 is X itself.  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));
	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the CFG.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */
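/* For example (a sketch): folding

     memcpy (&dst, &src, 8);      [VUSE = .MEM_3, VDEF = .MEM_4]

   into the assignment "dst = src;" keeps .MEM_3 as the first VUSE and
   .MEM_4 as the last VDEF of the emitted sequence, so virtual operand
   sequencing is preserved even when gimplification of EXPR expands
   into several statements.  */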
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc();
  struct gimplify_ctx gctx;
  gimple last = NULL;
  gimple laststore = NULL;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);
  reaching_vuse = gimple_vuse (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_remove (si_p, true);
	  return;
	}
    }
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      new_stmt = gsi_stmt (i);
      if (gimple_in_ssa_p (cfun))
	{
	  find_new_referenced_vars (new_stmt);
	  mark_symbols_for_renaming (new_stmt);
	}
      /* If the new statement has a VUSE, update it with exact SSA name we
	 know will reach this one.  */
      if (gimple_vuse (new_stmt))
	{
	  /* If we've also seen a previous store create a new VDEF for
	     the latter one, and make that the new reaching VUSE.  */
	  if (laststore)
	    {
	      reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
	      gimple_set_vdef (laststore, reaching_vuse);
	      update_stmt (laststore);
	      laststore = NULL;
	    }
	  gimple_set_vuse (new_stmt, reaching_vuse);
	  gimple_set_modified (new_stmt, true);
	}
      if (gimple_assign_single_p (new_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	{
	  laststore = new_stmt;
	}
      last = new_stmt;
    }

  if (lhs == NULL_TREE)
    {
      /* If we replace a call without LHS that has a VDEF and our new
	 sequence ends with a store we must make that store have the same
	 vdef in order not to break the sequencing.  This can happen
	 for instance when folding memcpy calls into assignments.  */
      if (gimple_vdef (stmt) && laststore)
	{
	  gimple_set_vdef (laststore, gimple_vdef (stmt));
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
	  update_stmt (laststore);
	}
      else if (gimple_in_ssa_p (cfun))
	{
	  unlink_stmt_vdef (stmt);
	  release_defs (stmt);
	}
      new_stmt = last;
    }
  else
    {
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      if (laststore && is_gimple_reg (lhs))
	{
	  gimple_set_vdef (laststore, gimple_vdef (stmt));
	  update_stmt (laststore);
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
	  laststore = NULL;
	}
      else if (laststore)
	{
	  reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
	  gimple_set_vdef (laststore, reaching_vuse);
	  update_stmt (laststore);
	  laststore = NULL;
	}
      new_stmt = gimple_build_assign (lhs, tmp);
      if (!is_gimple_reg (tmp))
	gimple_set_vuse (new_stmt, reaching_vuse);
      if (!is_gimple_reg (lhs))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
	}
      else if (reaching_vuse == gimple_vuse (stmt))
	unlink_stmt_vdef (stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
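/* E.g. (a sketch): for

     p_3 = PHI <"foo", "quux">
     n_4 = strlen (p_3);

   walking the PHI with TYPE == 1 yields the maximum string length 4,
   even though the exact length of p_3 is unknown.  */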
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
	return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
	       && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */
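/* A typical simplification (a sketch): when the dataflow walk proves
   the source length, a call such as

     strcpy (buf, "abc");

   may be folded into a memcpy with an explicit length -- a different
   call with different arguments rather than a constant -- which is why
   callers must be prepared to re-gimplify the result.  */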
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	    fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  If a call to the function must be accompanied
   with a this adjustment, the constant which should be added to this pointer
   is stored to *DELTA.  If REFUSE_THUNKS is true, return NULL if the function
   is a thunk (other than a this adjustment which is dealt with by DELTA).  */
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  tree *delta)
{
  HOST_WIDE_INT i;
  tree v, fndecl;

  v = BINFO_VIRTUALS (known_binfo);
  /* If there are no virtual methods, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;
  i = 0;
  while (i != token)
    {
      i += (TARGET_VTABLE_USES_DESCRIPTORS
	    ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
      v = TREE_CHAIN (v);
    }

  /* If BV_VCALL_INDEX is non-NULL, give up.  */
  if (TREE_TYPE (v))
    return NULL_TREE;

  fndecl = TREE_VALUE (v);

  /* When the cgraph node is missing and the function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in another partition, because we found the devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v)))
    return NULL_TREE;

  *delta = TREE_PURPOSE (v);
  gcc_checking_assert (host_integerp (*delta, 0));
  return fndecl;
}
/* Generate code adjusting the first parameter of a call statement determined
   by GSI by DELTA.  */
gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
{
  gimple call_stmt = gsi_stmt (*gsi);
  tree parm, tmp;
  gimple new_stmt;

  delta = convert_to_ptrofftype (delta);
  gcc_assert (gimple_call_num_args (call_stmt) >= 1);
  parm = gimple_call_arg (call_stmt, 0);
  gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
  tmp = create_tmp_var (TREE_TYPE (parm), NULL);
  add_referenced_var (tmp);

  tmp = make_ssa_name (tmp, NULL);
  new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
  SSA_NAME_DEF_STMT (tmp) = new_stmt;
  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
  gimple_call_set_arg (call_stmt, 0, tmp);
}
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).  */
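/* E.g. (a sketch): for

     static S s;              (class S with virtual methods, no bases)
     ... s.foo () via &s ...

   the ADDR_EXPR &s yields TYPE_BINFO (S), which is safe to use here
   because S has no base binfos; when ancestors are involved this
   function gives up and returns NULL.  */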
gimple_extract_devirt_binfo_from_cst (tree cst)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, expected_type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  expected_type = TREE_TYPE (cst);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
	break;
      if (offset < 0)
	return NULL_TREE;

      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_low_cst (DECL_SIZE (fld), 1);
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;

  return binfo;
}
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (!inplace && callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);

      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  return true;
	}
    }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      tree binfo, fndecl, delta, obj;
      HOST_WIDE_INT token;

      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  return true;
	}

      obj = OBJ_TYPE_REF_OBJECT (callee);
      binfo = gimple_extract_devirt_binfo_from_cst (obj);
      if (!binfo)
	return false;
      token = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
      fndecl = gimple_get_virt_method_for_binfo (token, binfo, &delta);
      if (!fndecl)
	return false;
      gcc_assert (integer_zerop (delta));
      gimple_call_set_fndecl (stmt, fndecl);
      return true;
    }

  return false;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;
  gimple_stmt_iterator gsinext = *gsi;
  gimple next_stmt;

  gsi_next (&gsinext);
  next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
	  {
	    tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	    if (tmp)
	      {
		gimple_call_set_arg (stmt, i, tmp);
		changed = true;
	      }
	  }
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	{
	  tree link = gimple_asm_output_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, true)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	{
	  tree link = gimple_asm_input_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, false)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* If stmt folds into nothing and it was the last stmt in a bb,
     don't call gsi_stmt.  */
  if (gsi_end_p (*gsi))
    {
      gcc_assert (next_stmt == NULL);
      return changed;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  Don't do this if stmt folded into nothing,
     as we'd be changing the next stmt.  */
  if (gimple_has_lhs (stmt) && stmt != next_stmt)
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   made any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in an unfolded state resulting from, for example, constant propagation,
   which can produce *&x = 0.  */
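/* E.g. (a sketch): constant propagation may turn

     p_1 = &x;  *p_1 = 0;    into    *&x = 0;

   and fold_stmt then rewrites the latter statement to the plain
   assignment x = 0.  */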
bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement STMT should be in valid gimple form but may
   be in an unfolded state resulting from, for example, constant propagation,
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  bool changed = fold_stmt_1 (&gsi, true);
  gcc_assert (gsi_stmt (gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */
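/* E.g. (a sketch): canonicalize_bool (x_1, true) for an SSA name x_1
   produces the boolean expression x_1 == 0, while for a comparison
   a < b it yields the inverted comparison a >= b.  */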
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }

  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */
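/* For instance (a sketch): with boolean x_1, identities like
   x_1 AND (x_1 == 0) => false are caught directly below; the inverted
   cases are first rewritten through DeMorgan's law so only the simpler
   non-inverted forms need handling.  */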
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */
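/* For example (a sketch): and_comparisons_1 (GT_EXPR, x, 3, GT_EXPR, x, 5)
   notices that both tests compare X against constants and keeps the more
   restrictive one, returning the tree for x > 5, while
   (x == 3) AND (x > 5) yields boolean_false_node.  */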
2104 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2105 enum tree_code code2, tree op2a, tree op2b)
2107 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
2108 if (operand_equal_p (op1a, op2a, 0)
2109 && operand_equal_p (op1b, op2b, 0))
2111 /* Result will be either NULL_TREE, or a combined comparison. */
2112 tree t = combine_comparisons (UNKNOWN_LOCATION,
2113 TRUTH_ANDIF_EXPR, code1, code2,
2114 boolean_type_node, op1a, op1b);
2119 /* Likewise the swapped case of the above. */
2120 if (operand_equal_p (op1a, op2b, 0)
2121 && operand_equal_p (op1b, op2a, 0))
2123 /* Result will be either NULL_TREE, or a combined comparison. */
2124 tree t = combine_comparisons (UNKNOWN_LOCATION,
2125 TRUTH_ANDIF_EXPR, code1,
2126 swap_tree_comparison (code2),
2127 boolean_type_node, op1a, op1b);
2132 /* If both comparisons are of the same value against constants, we might
2133 be able to merge them. */
2134 if (operand_equal_p (op1a, op2a, 0)
2135 && TREE_CODE (op1b) == INTEGER_CST
2136 && TREE_CODE (op2b) == INTEGER_CST)
2138 int cmp = tree_int_cst_compare (op1b, op2b);
2140 /* If we have (op1a == op1b), we should either be able to
2141 return that or FALSE, depending on whether the constant op1b
2142 also satisfies the other comparison against op2b. */
2143 if (code1 == EQ_EXPR)
2149 case EQ_EXPR: val = (cmp == 0); break;
2150 case NE_EXPR: val = (cmp != 0); break;
2151 case LT_EXPR: val = (cmp < 0); break;
2152 case GT_EXPR: val = (cmp > 0); break;
2153 case LE_EXPR: val = (cmp <= 0); break;
2154 case GE_EXPR: val = (cmp >= 0); break;
2155 default: done = false;
2160 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2162 return boolean_false_node;
2165 /* Likewise if the second comparison is an == comparison. */
2166 else if (code2 == EQ_EXPR)
2172 case EQ_EXPR: val = (cmp == 0); break;
2173 case NE_EXPR: val = (cmp != 0); break;
2174 case LT_EXPR: val = (cmp > 0); break;
2175 case GT_EXPR: val = (cmp < 0); break;
2176 case LE_EXPR: val = (cmp >= 0); break;
2177 case GE_EXPR: val = (cmp <= 0); break;
2178 default: done = false;
2183 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2185 return boolean_false_node;
2189 /* Same business with inequality tests. */
2190 else if (code1 == NE_EXPR)
2195 case EQ_EXPR: val = (cmp != 0); break;
2196 case NE_EXPR: val = (cmp == 0); break;
2197 case LT_EXPR: val = (cmp >= 0); break;
2198 case GT_EXPR: val = (cmp <= 0); break;
2199 case LE_EXPR: val = (cmp > 0); break;
2200 case GE_EXPR: val = (cmp < 0); break;
2205 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2207 else if (code2 == NE_EXPR)
2212 case EQ_EXPR: val = (cmp == 0); break;
2213 case NE_EXPR: val = (cmp != 0); break;
2214 case LT_EXPR: val = (cmp <= 0); break;
2215 case GT_EXPR: val = (cmp >= 0); break;
2216 case LE_EXPR: val = (cmp < 0); break;
2217 case GE_EXPR: val = (cmp > 0); break;
2222 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2225 /* Choose the more restrictive of two < or <= comparisons. */
2226 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2227 && (code2 == LT_EXPR || code2 == LE_EXPR))
2229 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2230 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2232 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2235 /* Likewise choose the more restrictive of two > or >= comparisons. */
2236 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2237 && (code2 == GT_EXPR || code2 == GE_EXPR))
2239 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2240 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2242 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2245 /* Check for singleton ranges. */
2246 else if (cmp == 0
2247 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2248 || (code1 == GE_EXPR && code2 == LE_EXPR)))
2249 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2251 /* Check for disjoint ranges. */
2252 else if (cmp <= 0
2253 && (code1 == LT_EXPR || code1 == LE_EXPR)
2254 && (code2 == GT_EXPR || code2 == GE_EXPR))
2255 return boolean_false_node;
2256 else if (cmp >= 0
2257 && (code1 == GT_EXPR || code1 == GE_EXPR)
2258 && (code2 == LT_EXPR || code2 == LE_EXPR))
2259 return boolean_false_node;
2262 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2263 NAME's definition is a truth value. See if there are any simplifications
2264 that can be done against the NAME's definition. */
2265 if (TREE_CODE (op1a) == SSA_NAME
2266 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2267 && (integer_zerop (op1b) || integer_onep (op1b)))
2269 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2270 || (code1 == NE_EXPR && integer_onep (op1b)));
2271 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2272 switch (gimple_code (stmt))
2275 /* Try to simplify by copy-propagating the definition. */
2276 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2279 /* If every argument to the PHI produces the same result when
2280 ANDed with the second comparison, we win.
2281 Do not do this unless the type is bool since we need a bool
2282 result here anyway. */
2283 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2285 tree result = NULL_TREE;
2287 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2289 tree arg = gimple_phi_arg_def (stmt, i);
2291 /* If this PHI has itself as an argument, ignore it.
2292 If all the other args produce the same result, return it. */
2294 if (arg == gimple_phi_result (stmt))
2296 else if (TREE_CODE (arg) == INTEGER_CST)
2298 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2301 result = boolean_false_node;
2302 else if (!integer_zerop (result))
2306 result = fold_build2 (code2, boolean_type_node,
2308 else if (!same_bool_comparison_p (result,
2312 else if (TREE_CODE (arg) == SSA_NAME
2313 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2316 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2317 /* In simple cases we can look through PHI nodes,
2318 but we have to be careful with loops.
2320 if (! dom_info_available_p (CDI_DOMINATORS)
2321 || gimple_bb (def_stmt) == gimple_bb (stmt)
2322 || dominated_by_p (CDI_DOMINATORS,
2323 gimple_bb (def_stmt),
2326 temp = and_var_with_comparison (arg, invert, code2,
2332 else if (!same_bool_result_p (result, temp))
2348 /* Try to simplify the AND of two comparisons, specified by
2349 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2350 If this can be simplified to a single expression (without requiring
2351 introducing more SSA variables to hold intermediate values),
2352 return the resulting tree. Otherwise return NULL_TREE.
2353 If the result expression is non-null, it has boolean type. */
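/* A minimal usage sketch (hypothetical operands, not from the original
   sources): merging two lower bounds keeps the more restrictive one:

     tree two = build_int_cst (integer_type_node, 2);
     tree five = build_int_cst (integer_type_node, 5);
     tree t = maybe_fold_and_comparisons (GT_EXPR, x, two,
                                          GT_EXPR, x, five);

   leaves t as the tree for (x > 5), or NULL_TREE if nothing folds.
   Unlike and_comparisons_1 itself, this wrapper also retries with the
   two comparisons swapped, so callers need not order them.  */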
2356 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2357 enum tree_code code2, tree op2a, tree op2b)
2359 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2363 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2366 /* Helper function for or_comparisons_1: try to simplify the OR of the
2367 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2368 If INVERT is true, invert the value of VAR before doing the OR.
2369 Return NULL_TREE if we can't simplify this to a single expression. */
2372 or_var_with_comparison (tree var, bool invert,
2373 enum tree_code code2, tree op2a, tree op2b)
2376 gimple stmt = SSA_NAME_DEF_STMT (var);
2378 /* We can only deal with variables whose definitions are assignments. */
2379 if (!is_gimple_assign (stmt))
2382 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2383 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2384 Then we only have to consider the simpler non-inverted cases. */
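/* For instance (illustrative): !a OR (x < y) is handled as
   !(a AND (x >= y)): the AND is folded by and_var_with_comparison_1
   using the inverted comparison code, and canonicalize_bool applies
   the final negation.  */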
2386 t = and_var_with_comparison_1 (stmt,
2387 invert_tree_comparison (code2, false),
2390 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2391 return canonicalize_bool (t, invert);
2394 /* Try to simplify the OR of the ssa variable defined by the assignment
2395 STMT with the comparison specified by (OP2A CODE2 OP2B).
2396 Return NULL_TREE if we can't simplify this to a single expression. */
2399 or_var_with_comparison_1 (gimple stmt,
2400 enum tree_code code2, tree op2a, tree op2b)
2402 tree var = gimple_assign_lhs (stmt);
2403 tree true_test_var = NULL_TREE;
2404 tree false_test_var = NULL_TREE;
2405 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2407 /* Check for identities like (var OR (var != 0)) => true. */
2408 if (TREE_CODE (op2a) == SSA_NAME
2409 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2411 if ((code2 == NE_EXPR && integer_zerop (op2b))
2412 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2414 true_test_var = op2a;
2415 if (var == true_test_var)
2418 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2419 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2421 false_test_var = op2a;
2422 if (var == false_test_var)
2423 return boolean_true_node;
2427 /* If the definition is a comparison, recurse on it. */
2428 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2430 tree t = or_comparisons_1 (innercode,
2431 gimple_assign_rhs1 (stmt),
2432 gimple_assign_rhs2 (stmt),
2440 /* If the definition is an AND or OR expression, we may be able to
2441 simplify by reassociating. */
2442 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2443 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2445 tree inner1 = gimple_assign_rhs1 (stmt);
2446 tree inner2 = gimple_assign_rhs2 (stmt);
2449 tree partial = NULL_TREE;
2450 bool is_or = (innercode == BIT_IOR_EXPR);
2452 /* Check for boolean identities that don't require recursive examination
2453 of inner1/inner2:
2454 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2455 inner1 OR (inner1 AND inner2) => inner1
2456 !inner1 OR (inner1 OR inner2) => true
2457 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2459 if (inner1 == true_test_var)
2460 return (is_or ? var : inner1);
2461 else if (inner2 == true_test_var)
2462 return (is_or ? var : inner2);
2463 else if (inner1 == false_test_var)
2464 return (is_or
2465 ? boolean_true_node
2466 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2467 else if (inner2 == false_test_var)
2468 return (is_or
2469 ? boolean_true_node
2470 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2472 /* Next, redistribute/reassociate the OR across the inner tests.
2473 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2474 if (TREE_CODE (inner1) == SSA_NAME
2475 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2476 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2477 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2478 gimple_assign_rhs1 (s),
2479 gimple_assign_rhs2 (s),
2480 code2, op2a, op2b)))
2482 /* Handle the OR case, where we are reassociating:
2483 (inner1 OR inner2) OR (op2a code2 op2b)
2484 => (t OR inner2)
2485 If the partial result t is a constant, we win. Otherwise
2486 continue on to try reassociating with the other inner test. */
2489 if (integer_onep (t))
2490 return boolean_true_node;
2491 else if (integer_zerop (t))
2492 return inner2;
2495 /* Handle the AND case, where we are redistributing:
2496 (inner1 AND inner2) OR (op2a code2 op2b)
2497 => (t AND (inner2 OR (op2a code op2b))) */
2498 else if (integer_zerop (t))
2499 return boolean_false_node;
2501 /* Save partial result for later. */
2505 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2506 if (TREE_CODE (inner2) == SSA_NAME
2507 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2508 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2509 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2510 gimple_assign_rhs1 (s),
2511 gimple_assign_rhs2 (s),
2512 code2, op2a, op2b)))
2514 /* Handle the OR case, where we are reassociating:
2515 (inner1 OR inner2) OR (op2a code2 op2b)
2516 => (inner1 OR t)
2517 => (t OR partial) */
2520 if (integer_zerop (t))
2521 return inner1;
2522 else if (integer_onep (t))
2523 return boolean_true_node;
2524 /* If both are the same, we can apply the identity
2525 (x OR x) == x. */
2526 else if (partial && same_bool_result_p (t, partial))
2527 return t;
2530 /* Handle the AND case, where we are redistributing:
2531 (inner1 AND inner2) OR (op2a code2 op2b)
2532 => (t AND (inner1 OR (op2a code2 op2b)))
2533 => (t AND partial) */
2536 if (integer_zerop (t))
2537 return boolean_false_node;
2540 /* We already got a simplification for the other
2541 operand to the redistributed AND expression. The
2542 interesting case is when at least one is true.
2543 Or, if both are the same, we can apply the identity
2544 (x AND x) == x. */
2545 if (integer_onep (partial))
2546 return t;
2547 else if (integer_onep (t))
2548 return partial;
2549 else if (same_bool_result_p (t, partial))
2550 return t;
2558 /* Try to simplify the OR of two comparisons defined by
2559 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2560 If this can be done without constructing an intermediate value,
2561 return the resulting tree; otherwise NULL_TREE is returned.
2562 This function is deliberately asymmetric as it recurses on SSA_DEFs
2563 in the first comparison but not the second. */
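/* Illustrative sketch (not from the original sources): with an integer
   SSA name X,

     tree two = build_int_cst (integer_type_node, 2);
     tree five = build_int_cst (integer_type_node, 5);
     tree t1 = or_comparisons_1 (LT_EXPR, x, two, LT_EXPR, x, five);

   yields the tree for (x < 5), the less restrictive bound, and

     tree t2 = or_comparisons_1 (LT_EXPR, x, five, GT_EXPR, x, two);

   yields boolean_true_node since the two ranges cover every value.  */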
2566 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2567 enum tree_code code2, tree op2a, tree op2b)
2569 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2570 if (operand_equal_p (op1a, op2a, 0)
2571 && operand_equal_p (op1b, op2b, 0))
2573 /* Result will be either NULL_TREE, or a combined comparison. */
2574 tree t = combine_comparisons (UNKNOWN_LOCATION,
2575 TRUTH_ORIF_EXPR, code1, code2,
2576 boolean_type_node, op1a, op1b);
2581 /* Likewise the swapped case of the above. */
2582 if (operand_equal_p (op1a, op2b, 0)
2583 && operand_equal_p (op1b, op2a, 0))
2585 /* Result will be either NULL_TREE, or a combined comparison. */
2586 tree t = combine_comparisons (UNKNOWN_LOCATION,
2587 TRUTH_ORIF_EXPR, code1,
2588 swap_tree_comparison (code2),
2589 boolean_type_node, op1a, op1b);
2594 /* If both comparisons are of the same value against constants, we might
2595 be able to merge them. */
2596 if (operand_equal_p (op1a, op2a, 0)
2597 && TREE_CODE (op1b) == INTEGER_CST
2598 && TREE_CODE (op2b) == INTEGER_CST)
2600 int cmp = tree_int_cst_compare (op1b, op2b);
2602 /* If we have (op1a != op1b), we should either be able to
2603 return that or TRUE, depending on whether the constant op1b
2604 also satisfies the other comparison against op2b. */
2605 if (code1 == NE_EXPR)
2611 case EQ_EXPR: val = (cmp == 0); break;
2612 case NE_EXPR: val = (cmp != 0); break;
2613 case LT_EXPR: val = (cmp < 0); break;
2614 case GT_EXPR: val = (cmp > 0); break;
2615 case LE_EXPR: val = (cmp <= 0); break;
2616 case GE_EXPR: val = (cmp >= 0); break;
2617 default: done = false;
2622 return boolean_true_node;
2624 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2627 /* Likewise if the second comparison is a != comparison. */
2628 else if (code2 == NE_EXPR)
2634 case EQ_EXPR: val = (cmp == 0); break;
2635 case NE_EXPR: val = (cmp != 0); break;
2636 case LT_EXPR: val = (cmp > 0); break;
2637 case GT_EXPR: val = (cmp < 0); break;
2638 case LE_EXPR: val = (cmp >= 0); break;
2639 case GE_EXPR: val = (cmp <= 0); break;
2640 default: done = false;
2645 return boolean_true_node;
2647 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2651 /* See if an equality test is redundant with the other comparison. */
2652 else if (code1 == EQ_EXPR)
2657 case EQ_EXPR: val = (cmp == 0); break;
2658 case NE_EXPR: val = (cmp != 0); break;
2659 case LT_EXPR: val = (cmp < 0); break;
2660 case GT_EXPR: val = (cmp > 0); break;
2661 case LE_EXPR: val = (cmp <= 0); break;
2662 case GE_EXPR: val = (cmp >= 0); break;
2667 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2669 else if (code2 == EQ_EXPR)
2674 case EQ_EXPR: val = (cmp == 0); break;
2675 case NE_EXPR: val = (cmp != 0); break;
2676 case LT_EXPR: val = (cmp > 0); break;
2677 case GT_EXPR: val = (cmp < 0); break;
2678 case LE_EXPR: val = (cmp >= 0); break;
2679 case GE_EXPR: val = (cmp <= 0); break;
2684 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2687 /* Choose the less restrictive of two < or <= comparisons. */
2688 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2689 && (code2 == LT_EXPR || code2 == LE_EXPR))
2691 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2692 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2694 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2697 /* Likewise choose the less restrictive of two > or >= comparisons. */
2698 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2699 && (code2 == GT_EXPR || code2 == GE_EXPR))
2701 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2702 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2704 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2707 /* Check for singleton ranges. */
2708 else if (cmp == 0
2709 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2710 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2711 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2713 /* Check for less/greater pairs that don't restrict the range at all. */
2714 else if (cmp >= 0
2715 && (code1 == LT_EXPR || code1 == LE_EXPR)
2716 && (code2 == GT_EXPR || code2 == GE_EXPR))
2717 return boolean_true_node;
2718 else if (cmp <= 0
2719 && (code1 == GT_EXPR || code1 == GE_EXPR)
2720 && (code2 == LT_EXPR || code2 == LE_EXPR))
2721 return boolean_true_node;
2724 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2725 NAME's definition is a truth value. See if there are any simplifications
2726 that can be done against the NAME's definition. */
2727 if (TREE_CODE (op1a) == SSA_NAME
2728 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2729 && (integer_zerop (op1b) || integer_onep (op1b)))
2731 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2732 || (code1 == NE_EXPR && integer_onep (op1b)));
2733 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2734 switch (gimple_code (stmt))
2737 /* Try to simplify by copy-propagating the definition. */
2738 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2741 /* If every argument to the PHI produces the same result when
2742 ORed with the second comparison, we win.
2743 Do not do this unless the type is bool since we need a bool
2744 result here anyway. */
2745 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2747 tree result = NULL_TREE;
2749 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2751 tree arg = gimple_phi_arg_def (stmt, i);
2753 /* If this PHI has itself as an argument, ignore it.
2754 If all the other args produce the same result, return it. */
2756 if (arg == gimple_phi_result (stmt))
2758 else if (TREE_CODE (arg) == INTEGER_CST)
2760 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2763 result = boolean_true_node;
2764 else if (!integer_onep (result))
2768 result = fold_build2 (code2, boolean_type_node,
2770 else if (!same_bool_comparison_p (result,
2774 else if (TREE_CODE (arg) == SSA_NAME
2775 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2778 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2779 /* In simple cases we can look through PHI nodes,
2780 but we have to be careful with loops.
2782 if (! dom_info_available_p (CDI_DOMINATORS)
2783 || gimple_bb (def_stmt) == gimple_bb (stmt)
2784 || dominated_by_p (CDI_DOMINATORS,
2785 gimple_bb (def_stmt),
2788 temp = or_var_with_comparison (arg, invert, code2,
2794 else if (!same_bool_result_p (result, temp))
2810 /* Try to simplify the OR of two comparisons, specified by
2811 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2812 If this can be simplified to a single expression (without requiring
2813 introducing more SSA variables to hold intermediate values),
2814 return the resulting tree. Otherwise return NULL_TREE.
2815 If the result expression is non-null, it has boolean type. */
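/* A minimal usage sketch (hypothetical operands): an equality that is
   implied by the other comparison is dropped:

     tree three = build_int_cst (integer_type_node, 3);
     tree five = build_int_cst (integer_type_node, 5);
     tree t = maybe_fold_or_comparisons (EQ_EXPR, x, three,
                                         LT_EXPR, x, five);

   leaves t as the tree for (x < 5), since x == 3 implies x < 5.  As
   with the AND variant, the two comparisons are also tried in swapped
   order before giving up.  */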
2818 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2819 enum tree_code code2, tree op2a, tree op2b)
2821 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2825 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2829 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2831 Either NULL_TREE, a simplified but non-constant, or a constant is returned.
2834 ??? This should go into a gimple-fold-inline.h file to be eventually
2835 privatized with the single valueize function used in the various TUs
2836 to avoid the indirect function call overhead. */
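/* A sketch of the VALUEIZE contract (illustrative, not from the
   original sources): the callback receives an SSA name and returns a
   value known for it, or the name itself when nothing better is known.
   The identity callback reduces this function to folding the
   statement's literal operands:

     static tree
     identity_valueize (tree name)
     {
       return name;
     }

     tree val = gimple_fold_stmt_to_constant_1 (stmt, identity_valueize);

   Propagation passes such as CCP instead supply a callback that looks
   up their own lattice values.  */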
2839 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2841 location_t loc = gimple_location (stmt);
2842 switch (gimple_code (stmt))
2846 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2848 switch (get_gimple_rhs_class (subcode))
2850 case GIMPLE_SINGLE_RHS:
2852 tree rhs = gimple_assign_rhs1 (stmt);
2853 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2855 if (TREE_CODE (rhs) == SSA_NAME)
2857 /* If the RHS is an SSA_NAME, return its known constant value, if any. */
2859 return (*valueize) (rhs);
2861 /* Handle propagating invariant addresses into address operations. */
2863 else if (TREE_CODE (rhs) == ADDR_EXPR
2864 && !is_gimple_min_invariant (rhs))
2866 HOST_WIDE_INT offset;
2868 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2872 && (CONSTANT_CLASS_P (base)
2873 || decl_address_invariant_p (base)))
2874 return build_invariant_address (TREE_TYPE (rhs),
2877 else if (TREE_CODE (rhs) == CONSTRUCTOR
2878 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2879 && (CONSTRUCTOR_NELTS (rhs)
2880 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2886 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2888 val = (*valueize) (val);
2889 if (TREE_CODE (val) == INTEGER_CST
2890 || TREE_CODE (val) == REAL_CST
2891 || TREE_CODE (val) == FIXED_CST)
2892 list = tree_cons (NULL_TREE, val, list);
2897 return build_vector (TREE_TYPE (rhs), nreverse (list));
2900 if (kind == tcc_reference)
2902 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2903 || TREE_CODE (rhs) == REALPART_EXPR
2904 || TREE_CODE (rhs) == IMAGPART_EXPR)
2905 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2907 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2908 return fold_unary_loc (EXPR_LOCATION (rhs),
2910 TREE_TYPE (rhs), val);
2912 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2913 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2915 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2916 return fold_ternary_loc (EXPR_LOCATION (rhs),
2918 TREE_TYPE (rhs), val,
2919 TREE_OPERAND (rhs, 1),
2920 TREE_OPERAND (rhs, 2));
2922 else if (TREE_CODE (rhs) == MEM_REF
2923 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2925 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2926 if (TREE_CODE (val) == ADDR_EXPR
2927 && is_gimple_min_invariant (val))
2929 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2931 TREE_OPERAND (rhs, 1));
2936 return fold_const_aggregate_ref_1 (rhs, valueize);
2938 else if (kind == tcc_declaration)
2939 return get_symbol_constant_value (rhs);
2943 case GIMPLE_UNARY_RHS:
2945 /* Handle unary operators that can appear in GIMPLE form.
2946 Note that we know the single operand must be a constant,
2947 so this should almost always return a simplified RHS. */
2948 tree lhs = gimple_assign_lhs (stmt);
2949 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2951 /* Conversions are useless for CCP purposes if they are
2952 value-preserving. Thus the restrictions that
2953 useless_type_conversion_p places for pointer type conversions
2954 do not apply here. Substitution later will only substitute to
2955 allowed places. */
2956 if (CONVERT_EXPR_CODE_P (subcode)
2957 && POINTER_TYPE_P (TREE_TYPE (lhs))
2958 && POINTER_TYPE_P (TREE_TYPE (op0)))
2961 /* Try to re-construct array references on-the-fly. */
2962 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2964 && ((tem = maybe_fold_offset_to_address
2966 op0, integer_zero_node, TREE_TYPE (lhs)))
2973 fold_unary_ignore_overflow_loc (loc, subcode,
2974 gimple_expr_type (stmt), op0);
2977 case GIMPLE_BINARY_RHS:
2979 /* Handle binary operators that can appear in GIMPLE form. */
2980 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2981 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2983 /* Translate &x + CST into an invariant form suitable for
2984 further propagation. */
2985 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2986 && TREE_CODE (op0) == ADDR_EXPR
2987 && TREE_CODE (op1) == INTEGER_CST)
2989 tree off = fold_convert (ptr_type_node, op1);
2990 return build_fold_addr_expr_loc
2992 fold_build2 (MEM_REF,
2993 TREE_TYPE (TREE_TYPE (op0)),
2994 unshare_expr (op0), off));
2997 return fold_binary_loc (loc, subcode,
2998 gimple_expr_type (stmt), op0, op1);
3001 case GIMPLE_TERNARY_RHS:
3003 /* Handle ternary operators that can appear in GIMPLE form. */
3004 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
3005 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
3006 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
3008 return fold_ternary_loc (loc, subcode,
3009 gimple_expr_type (stmt), op0, op1, op2);
3021 if (gimple_call_internal_p (stmt))
3022 /* No folding yet for these functions. */
3023 return NULL_TREE;
3025 fn = (*valueize) (gimple_call_fn (stmt));
3026 if (TREE_CODE (fn) == ADDR_EXPR
3027 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3028 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
3030 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
3033 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3034 args[i] = (*valueize) (gimple_call_arg (stmt, i));
3035 call = build_call_array_loc (loc,
3036 gimple_call_return_type (stmt),
3037 fn, gimple_call_num_args (stmt), args);
3038 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
3040 /* fold_call_expr wraps the result inside a NOP_EXPR. */
3041 STRIP_NOPS (retval);
3052 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
3053 Returns NULL_TREE if folding to a constant is not possible, otherwise
3054 returns a constant according to is_gimple_min_invariant. */
3057 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
3059 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
3060 if (res && is_gimple_min_invariant (res))
3066 /* The following set of functions is supposed to fold references using
3067 their constant initializers. */
3069 static tree fold_ctor_reference (tree type, tree ctor,
3070 unsigned HOST_WIDE_INT offset,
3071 unsigned HOST_WIDE_INT size);
3073 /* See if we can find a constructor defining the value of BASE.
3074 When we know the constructor and a constant offset (such as when
3075 BASE is array[40] and we know the constructor of array), then
3076 BIT_OFFSET is adjusted accordingly.
3078 As a special case, return error_mark_node when the constructor
3079 is not explicitly available but is known to be zero,
3080 as for 'static const int a;'. */
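/* Example (illustrative): given

     static const int a[4] = { 1, 2, 3, 4 };

   and BASE being a MEM_REF of &a with constant byte offset 4, that
   offset is folded into *BIT_OFFSET (adding 4 * BITS_PER_UNIT) and the
   CONSTRUCTOR from DECL_INITIAL (a) is returned.  For

     static const int b;

   there is no explicit initializer, so error_mark_node is returned to
   signal a known-zero value.  */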
3082 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
3083 tree (*valueize)(tree))
3085 HOST_WIDE_INT bit_offset2, size, max_size;
3086 if (TREE_CODE (base) == MEM_REF)
3088 if (!integer_zerop (TREE_OPERAND (base, 1)))
3090 if (!host_integerp (TREE_OPERAND (base, 1), 0))
3092 *bit_offset += (mem_ref_offset (base).low
3097 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
3098 base = valueize (TREE_OPERAND (base, 0));
3099 if (!base || TREE_CODE (base) != ADDR_EXPR)
3101 base = TREE_OPERAND (base, 0);
3104 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
3105 DECL_INITIAL. If BASE is a nested reference into another
3106 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
3107 the inner reference. */
3108 switch (TREE_CODE (base))
3111 if (!const_value_known_p (base))
3116 if (!DECL_INITIAL (base)
3117 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
3118 return error_mark_node;
3119 return DECL_INITIAL (base);
3123 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
3124 if (max_size == -1 || size != max_size)
3126 *bit_offset += bit_offset2;
3127 return get_base_constructor (base, bit_offset, valueize);
3138 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
3139 to the memory at bit OFFSET.
3141 We only do the simple job of folding byte accesses. */
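/* Example (illustrative): with CTOR being the STRING_CST from

     static const char s[20] = "hello";

   a one-byte read of type char at bit OFFSET 8 folds to 'e', while a
   read past the string but inside the declared array folds to 0,
   since the trailing memory is zero-initialized.  */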
3144 fold_string_cst_ctor_reference (tree type, tree ctor,
3145 unsigned HOST_WIDE_INT offset,
3146 unsigned HOST_WIDE_INT size)
3148 if (INTEGRAL_TYPE_P (type)
3149 && (TYPE_MODE (type)
3150 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
3151 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
3153 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
3154 && size == BITS_PER_UNIT
3155 && !(offset % BITS_PER_UNIT))
3157 offset /= BITS_PER_UNIT;
3158 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
3159 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
3161 /* Folding
3162 const char a[20]="hello";
3163 return a[10];
3165 might lead to an offset greater than the string length. In this case we
3166 know the value is either initialized to 0 or out of bounds. Return 0
3167 in both cases. */
3168 return build_zero_cst (type);
3173 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
3174 SIZE to the memory at bit OFFSET. */
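/* Example (illustrative): for

     static const int a[3] = { 10, 20, 30 };

   a 32-bit read at bit OFFSET 64 (assuming 4-byte ints and 8-bit
   units) computes access_index = (64 / 8) / 4 + low_bound = 2 and
   folds to the constructor element 30.  */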
3177 fold_array_ctor_reference (tree type, tree ctor,
3178 unsigned HOST_WIDE_INT offset,
3179 unsigned HOST_WIDE_INT size)
3181 unsigned HOST_WIDE_INT cnt;
3183 double_int low_bound, elt_size;
3184 double_int index, max_index;
3185 double_int access_index;
3186 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
3187 HOST_WIDE_INT inner_offset;
3189 /* Compute low bound and elt size. */
3190 if (domain_type && TYPE_MIN_VALUE (domain_type))
3192 /* Static constructors for variably sized objects make no sense. */
3193 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
3194 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
3197 low_bound = double_int_zero;
3198 /* Static constructors for variably sized objects make no sense. */
3199 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
3202 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
3205 /* We can handle only constantly sized accesses that are known not to
3206 be larger than the size of an array element. */
3207 if (!TYPE_SIZE_UNIT (type)
3208 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
3209 || double_int_cmp (elt_size,
3210 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
3213 /* Compute the array index we look for. */
3214 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
3215 elt_size, TRUNC_DIV_EXPR);
3216 access_index = double_int_add (access_index, low_bound);
3218 /* And offset within the access. */
3219 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
3221 /* See if the array field is large enough to span the whole access. We
3222 do not care to fold accesses spanning multiple array indexes. */
3223 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
3226 index = double_int_sub (low_bound, double_int_one);
3227 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
3229 /* An array constructor may explicitly set the index, specify a range,
3230 or leave the index NULL, meaning that it is the next index after the
3231 previous one. */
3234 if (TREE_CODE (cfield) == INTEGER_CST)
3235 max_index = index = tree_to_double_int (cfield);
3238 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
3239 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
3240 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
3244 max_index = index = double_int_add (index, double_int_one);
3246 /* Do we have a match? */
3247 if (double_int_cmp (access_index, index, 1) >= 0
3248 && double_int_cmp (access_index, max_index, 1) <= 0)
3249 return fold_ctor_reference (type, cval, inner_offset, size);
3251 /* When memory is not explicitly mentioned in the constructor,
3252 it is 0 (or out of range). */
3253 return build_zero_cst (type);
3256 /* CTOR is CONSTRUCTOR of an aggregate or vector.
3257 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
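/* Example (illustrative): for

     static const struct { char c; int i; } v = { 'x', 7 };

   a 32-bit read at the bit offset of field 'i' lies entirely within
   that field's [bitoffset, bitoffset_end) range, so the walk recurses
   via fold_ctor_reference into the field value 7.  */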
3260 fold_nonarray_ctor_reference (tree type, tree ctor,
3261 unsigned HOST_WIDE_INT offset,
3262 unsigned HOST_WIDE_INT size)
3264 unsigned HOST_WIDE_INT cnt;
3267 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
3270 tree byte_offset = DECL_FIELD_OFFSET (cfield);
3271 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
3272 tree field_size = DECL_SIZE (cfield);
3273 double_int bitoffset;
3274 double_int byte_offset_cst = tree_to_double_int (byte_offset);
3275 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
3276 double_int bitoffset_end, access_end;
3278 /* Variable-sized objects in static constructors make no sense,
3279 but field_size can be NULL for flexible array members. */
3280 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
3281 && TREE_CODE (byte_offset) == INTEGER_CST
3282 && (field_size != NULL_TREE
3283 ? TREE_CODE (field_size) == INTEGER_CST
3284 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
3286 /* Compute bit offset of the field. */
3287 bitoffset = double_int_add (tree_to_double_int (field_offset),
3288 double_int_mul (byte_offset_cst,
3289 bits_per_unit_cst));
3290 /* Compute bit offset where the field ends. */
3291 if (field_size != NULL_TREE)
3292 bitoffset_end = double_int_add (bitoffset,
3293 tree_to_double_int (field_size));
3295 bitoffset_end = double_int_zero;
3297 access_end = double_int_add (uhwi_to_double_int (offset),
3298 uhwi_to_double_int (size));
3300 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
3301 [BITOFFSET, BITOFFSET_END)? */
3302 if (double_int_cmp (access_end, bitoffset, 0) > 0
3303 && (field_size == NULL_TREE
3304 || double_int_cmp (uhwi_to_double_int (offset),
3305 bitoffset_end, 0) < 0))
3307 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
3309 /* We do have overlap. Now see if the field is large enough to
3310 cover the access. Give up for accesses spanning multiple
3311 fields. */
3312 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
3314 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
3316 return fold_ctor_reference (type, cval,
3317 double_int_to_uhwi (inner_offset), size);
3320 /* When memory is not explicitly mentioned in the constructor, it is 0. */
3321 return build_zero_cst (type);
3324 /* CTOR is a value initializing memory; fold a reference of type TYPE
3325 and size SIZE to the memory at bit OFFSET. */
3328 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
3329 unsigned HOST_WIDE_INT size)
3333 /* We found the field with an exact match. */
3334 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
3336 return canonicalize_constructor_val (ctor);
3338 /* We are at the end of the walk; see if we can view-convert the
3339 result. */
3340 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
3341 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3342 && operand_equal_p (TYPE_SIZE (type),
3343 TYPE_SIZE (TREE_TYPE (ctor)), 0))
3345 ret = canonicalize_constructor_val (ctor);
3346 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
3351 if (TREE_CODE (ctor) == STRING_CST)
3352 return fold_string_cst_ctor_reference (type, ctor, offset, size);
3353 if (TREE_CODE (ctor) == CONSTRUCTOR)
3356 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
3357 return fold_array_ctor_reference (type, ctor, offset, size);
3359 return fold_nonarray_ctor_reference (type, ctor, offset, size);
3365 /* Return the tree representing the element referenced by T if T is an
3366 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
3367 names using VALUEIZE. Return NULL_TREE otherwise. */
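/* Example (illustrative): for T being the ARRAY_REF a[i] with

     static const int a[2] = { 5, 9 };

   and a VALUEIZE callback that maps i to 1, the ARRAY_REF case below
   derives the constant bit offset 32 and the reference folds to 9.  */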
3370 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
3372 tree ctor, idx, base;
3373 HOST_WIDE_INT offset, size, max_size;
3376 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3377 return get_symbol_constant_value (t);
3379 tem = fold_read_from_constant_string (t);
3383 switch (TREE_CODE (t))
3386 case ARRAY_RANGE_REF:
3387 /* Constant indexes are handled well by get_base_constructor.
3388 Only special-case variable offsets.
3389 FIXME: This code can't handle nested references with variable indexes
3390 (they will be handled only by iteration of ccp). Perhaps we can bring
3391 get_ref_base_and_extent here and make it use a valueize callback. */
3392 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3394 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3395 && host_integerp (idx, 0))
3397 tree low_bound, unit_size;
3399 /* If the resulting bit-offset is constant, track it. */
3400 if ((low_bound = array_ref_low_bound (t),
3401 host_integerp (low_bound, 0))
3402 && (unit_size = array_ref_element_size (t),
3403 host_integerp (unit_size, 1)))
3405 offset = TREE_INT_CST_LOW (idx);
3406 offset -= TREE_INT_CST_LOW (low_bound);
3407 offset *= TREE_INT_CST_LOW (unit_size);
3408 offset *= BITS_PER_UNIT;
3410 base = TREE_OPERAND (t, 0);
3411 ctor = get_base_constructor (base, &offset, valueize);
3412 /* Empty constructor. Always fold to 0. */
3413 if (ctor == error_mark_node)
3414 return build_zero_cst (TREE_TYPE (t));
3415 /* Out of bound array access. Value is undefined, but don't fold. */
3419 /* We cannot determine the ctor. */
3422 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3423 TREE_INT_CST_LOW (unit_size)
3431 case TARGET_MEM_REF:
3433 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3434 ctor = get_base_constructor (base, &offset, valueize);
3436 /* Empty constructor. Always fold to 0. */
3437 if (ctor == error_mark_node)
3438 return build_zero_cst (TREE_TYPE (t));
3439 /* We do not know the precise address. */
3440 if (max_size == -1 || max_size != size)
3442 /* We cannot determine the ctor. */
3446 /* Out of bound array access. Value is undefined, but don't fold. */
3450 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
3455 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3456 if (c && TREE_CODE (c) == COMPLEX_CST)
3457 return fold_build1_loc (EXPR_LOCATION (t),
3458 TREE_CODE (t), TREE_TYPE (t), c);
3470 fold_const_aggregate_ref (tree t)
3472 return fold_const_aggregate_ref_1 (t, NULL);
3475 /* Return true iff VAL is a gimple expression that is known to be
3476 non-negative. Restricted to floating-point inputs. */
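/* Example (illustrative): for

     double y = x * x;

   the MULT_EXPR case below sees identical operands and returns true,
   and for y = __builtin_pow (x, 2.0) the POW case accepts the call
   because the exponent is an even integer-valued real.  */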
3479 gimple_val_nonnegative_real_p (tree val)
3483 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3485 /* Use existing logic for non-gimple trees. */
3486 if (tree_expr_nonnegative_p (val))
3489 if (TREE_CODE (val) != SSA_NAME)
3492 /* Currently we look only at the immediately defining statement
3493 to make this determination, since recursion on defining
3494 statements of operands can lead to quadratic behavior in the
3495 worst case. This is expected to catch almost all occurrences
3496 in practice. It would be possible to implement limited-depth
3497 recursion if important cases are lost. Alternatively, passes
3498 that need this information (such as the pow/powi lowering code
3499 in the cse_sincos pass) could be revised to provide it through
3500 dataflow propagation. */
3502 def_stmt = SSA_NAME_DEF_STMT (val);
3504 if (is_gimple_assign (def_stmt))
3508 /* See fold-const.c:tree_expr_nonnegative_p for additional
3509 cases that could be handled with recursion. */
3511 switch (gimple_assign_rhs_code (def_stmt))
3514 /* Always true for floating-point operands. */
3518 /* True if the two operands are identical (since we are
3519 restricted to floating-point inputs). */
3520 op0 = gimple_assign_rhs1 (def_stmt);
3521 op1 = gimple_assign_rhs2 (def_stmt);
3524 || operand_equal_p (op0, op1, 0))
3531 else if (is_gimple_call (def_stmt))
3533 tree fndecl = gimple_call_fndecl (def_stmt);
3535 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3539 switch (DECL_FUNCTION_CODE (fndecl))
3541 CASE_FLT_FN (BUILT_IN_ACOS):
3542 CASE_FLT_FN (BUILT_IN_ACOSH):
3543 CASE_FLT_FN (BUILT_IN_CABS):
3544 CASE_FLT_FN (BUILT_IN_COSH):
3545 CASE_FLT_FN (BUILT_IN_ERFC):
3546 CASE_FLT_FN (BUILT_IN_EXP):
3547 CASE_FLT_FN (BUILT_IN_EXP10):
3548 CASE_FLT_FN (BUILT_IN_EXP2):
3549 CASE_FLT_FN (BUILT_IN_FABS):
3550 CASE_FLT_FN (BUILT_IN_FDIM):
3551 CASE_FLT_FN (BUILT_IN_HYPOT):
3552 CASE_FLT_FN (BUILT_IN_POW10):
3555 CASE_FLT_FN (BUILT_IN_SQRT):
3556 /* sqrt(-0.0) is -0.0, and sqrt is not otherwise defined over
3557 negative inputs, so the result is nonnegative where defined. */
3558 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3563 CASE_FLT_FN (BUILT_IN_POWI):
3564 /* True if the second argument is an even integer. */
3565 arg1 = gimple_call_arg (def_stmt, 1);
3567 if (TREE_CODE (arg1) == INTEGER_CST
3568 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3573 CASE_FLT_FN (BUILT_IN_POW):
3574 /* True if the second argument is an even integer-valued real. */
3576 arg1 = gimple_call_arg (def_stmt, 1);
3578 if (TREE_CODE (arg1) == REAL_CST)
3583 c = TREE_REAL_CST (arg1);
3584 n = real_to_integer (&c);
3588 REAL_VALUE_TYPE cint;
3589 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3590 if (real_identical (&c, &cint))