1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
33 #include "gimple-fold.h"
35 /* Return true when DECL can be referenced from current unit.
36 We can get declarations that are not possible to reference for
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to other compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
45 2) In WHOPR mode devirtualization might lead to reference
46 to method that was partitioned elsehwere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
50 3) COMDAT functions referred by external vtables that
51 we devirtualize only during final copmilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
57 can_refer_decl_in_current_unit_p (tree decl)
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
62 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
64 /* External flag is set, so we deal with C++ reference
65 to static object from other file. */
66 if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
67 && TREE_CODE (decl) == VAR_DECL)
69 /* Just be sure it is not big in frontend setting
70 flags incorrectly. Those variables should never
72 gcc_checking_assert (!(vnode = varpool_get_node (decl))
73 || !vnode->finalized);
76 /* When function is public, we always can introduce new reference.
77 Exception are the COMDAT functions where introducing a direct
78 reference imply need to include function body in the curren tunit. */
79 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
81 /* We are not at ltrans stage; so don't worry about WHOPR.
82 Also when still gimplifying all referred comdat functions will be
84 ??? as observed in PR20991 for already optimized out comdat virtual functions
85 we may not neccesarily give up because the copy will be output elsewhere when
86 corresponding vtable is output. */
87 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
89 /* If we already output the function body, we are safe. */
90 if (TREE_ASM_WRITTEN (decl))
92 if (TREE_CODE (decl) == FUNCTION_DECL)
94 node = cgraph_get_node (decl);
95 /* Check that we still have function body and that we didn't took
96 the decision to eliminate offline copy of the function yet.
97 The second is important when devirtualization happens during final
98 compilation stage when making a new reference no longer makes callee
100 if (!node || !node->analyzed || node->global.inlined_to)
103 else if (TREE_CODE (decl) == VAR_DECL)
105 vnode = varpool_get_node (decl);
106 if (!vnode || !vnode->finalized)
112 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
113 acceptable form for is_gimple_min_invariant. */
116 canonicalize_constructor_val (tree cval)
119 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
120 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
122 tree ptr = TREE_OPERAND (cval, 0);
123 if (is_gimple_min_invariant (ptr))
124 cval = build1_loc (EXPR_LOCATION (cval),
125 ADDR_EXPR, TREE_TYPE (ptr),
126 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
128 fold_convert (ptr_type_node,
129 TREE_OPERAND (cval, 1))));
131 if (TREE_CODE (cval) == ADDR_EXPR)
133 tree base = get_base_address (TREE_OPERAND (cval, 0));
136 && (TREE_CODE (base) == VAR_DECL
137 || TREE_CODE (base) == FUNCTION_DECL)
138 && !can_refer_decl_in_current_unit_p (base))
140 if (cfun && base && TREE_CODE (base) == VAR_DECL)
141 add_referenced_var (base);
142 /* Fixup types in global initializers. */
143 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
144 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
149 /* If SYM is a constant variable with known value, return the value.
150 NULL_TREE is returned otherwise. */
153 get_symbol_constant_value (tree sym)
155 if (const_value_known_p (sym))
157 tree val = DECL_INITIAL (sym);
160 val = canonicalize_constructor_val (val);
161 if (val && is_gimple_min_invariant (val))
166 /* Variables declared 'const' without an initializer
167 have zero as the initializer if they may not be
168 overridden at link or run time. */
170 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
171 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
172 return build_zero_cst (TREE_TYPE (sym));
180 /* Subroutine of fold_stmt. We perform several simplifications of the
181 memory reference tree EXPR and make sure to re-gimplify them properly
182 after propagation of constant addresses. IS_LHS is true if the
183 reference is supposed to be an lvalue. */
186 maybe_fold_reference (tree expr, bool is_lhs)
191 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
192 || TREE_CODE (expr) == REALPART_EXPR
193 || TREE_CODE (expr) == IMAGPART_EXPR)
194 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
195 return fold_unary_loc (EXPR_LOCATION (expr),
198 TREE_OPERAND (expr, 0));
199 else if (TREE_CODE (expr) == BIT_FIELD_REF
200 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
201 return fold_ternary_loc (EXPR_LOCATION (expr),
204 TREE_OPERAND (expr, 0),
205 TREE_OPERAND (expr, 1),
206 TREE_OPERAND (expr, 2));
208 while (handled_component_p (*t))
209 t = &TREE_OPERAND (*t, 0);
211 /* Canonicalize MEM_REFs invariant address operand. Do this first
212 to avoid feeding non-canonical MEM_REFs elsewhere. */
213 if (TREE_CODE (*t) == MEM_REF
214 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
216 bool volatile_p = TREE_THIS_VOLATILE (*t);
217 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
218 TREE_OPERAND (*t, 0),
219 TREE_OPERAND (*t, 1));
222 TREE_THIS_VOLATILE (tem) = volatile_p;
224 tem = maybe_fold_reference (expr, is_lhs);
232 && (result = fold_const_aggregate_ref (expr))
233 && is_gimple_min_invariant (result))
236 /* Fold back MEM_REFs to reference trees. */
237 if (TREE_CODE (*t) == MEM_REF
238 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
239 && integer_zerop (TREE_OPERAND (*t, 1))
240 && (TREE_THIS_VOLATILE (*t)
241 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
242 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
243 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
244 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
245 /* We have to look out here to not drop a required conversion
246 from the rhs to the lhs if is_lhs, but we don't have the
247 rhs here to verify that. Thus require strict type
249 && types_compatible_p (TREE_TYPE (*t),
250 TREE_TYPE (TREE_OPERAND
251 (TREE_OPERAND (*t, 0), 0))))
254 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
255 tem = maybe_fold_reference (expr, is_lhs);
260 else if (TREE_CODE (*t) == TARGET_MEM_REF)
262 tree tem = maybe_fold_tmr (*t);
266 tem = maybe_fold_reference (expr, is_lhs);
277 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
278 replacement rhs for the statement or NULL_TREE if no simplification
279 could be made. It is assumed that the operands have been previously
283 fold_gimple_assign (gimple_stmt_iterator *si)
285 gimple stmt = gsi_stmt (*si);
286 enum tree_code subcode = gimple_assign_rhs_code (stmt);
287 location_t loc = gimple_location (stmt);
289 tree result = NULL_TREE;
291 switch (get_gimple_rhs_class (subcode))
293 case GIMPLE_SINGLE_RHS:
295 tree rhs = gimple_assign_rhs1 (stmt);
297 if (REFERENCE_CLASS_P (rhs))
298 return maybe_fold_reference (rhs, false);
300 else if (TREE_CODE (rhs) == ADDR_EXPR)
302 tree ref = TREE_OPERAND (rhs, 0);
303 tree tem = maybe_fold_reference (ref, true);
305 && TREE_CODE (tem) == MEM_REF
306 && integer_zerop (TREE_OPERAND (tem, 1)))
307 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
309 result = fold_convert (TREE_TYPE (rhs),
310 build_fold_addr_expr_loc (loc, tem));
311 else if (TREE_CODE (ref) == MEM_REF
312 && integer_zerop (TREE_OPERAND (ref, 1)))
313 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
316 else if (TREE_CODE (rhs) == CONSTRUCTOR
317 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
318 && (CONSTRUCTOR_NELTS (rhs)
319 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
321 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
325 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
326 if (TREE_CODE (val) != INTEGER_CST
327 && TREE_CODE (val) != REAL_CST
328 && TREE_CODE (val) != FIXED_CST)
331 return build_vector_from_ctor (TREE_TYPE (rhs),
332 CONSTRUCTOR_ELTS (rhs));
335 else if (DECL_P (rhs))
336 return unshare_expr (get_symbol_constant_value (rhs));
338 /* If we couldn't fold the RHS, hand over to the generic
340 if (result == NULL_TREE)
343 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
344 that may have been added by fold, and "useless" type
345 conversions that might now be apparent due to propagation. */
346 STRIP_USELESS_TYPE_CONVERSION (result);
348 if (result != rhs && valid_gimple_rhs_p (result))
355 case GIMPLE_UNARY_RHS:
357 tree rhs = gimple_assign_rhs1 (stmt);
359 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
362 /* If the operation was a conversion do _not_ mark a
363 resulting constant with TREE_OVERFLOW if the original
364 constant was not. These conversions have implementation
365 defined behavior and retaining the TREE_OVERFLOW flag
366 here would confuse later passes such as VRP. */
367 if (CONVERT_EXPR_CODE_P (subcode)
368 && TREE_CODE (result) == INTEGER_CST
369 && TREE_CODE (rhs) == INTEGER_CST)
370 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
372 STRIP_USELESS_TYPE_CONVERSION (result);
373 if (valid_gimple_rhs_p (result))
379 case GIMPLE_BINARY_RHS:
380 /* Try to canonicalize for boolean-typed X the comparisons
381 X == 0, X == 1, X != 0, and X != 1. */
382 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
383 || gimple_assign_rhs_code (stmt) == NE_EXPR)
385 tree lhs = gimple_assign_lhs (stmt);
386 tree op1 = gimple_assign_rhs1 (stmt);
387 tree op2 = gimple_assign_rhs2 (stmt);
388 tree type = TREE_TYPE (op1);
390 /* Check whether the comparison operands are of the same boolean
391 type as the result type is.
392 Check that second operand is an integer-constant with value
394 if (TREE_CODE (op2) == INTEGER_CST
395 && (integer_zerop (op2) || integer_onep (op2))
396 && useless_type_conversion_p (TREE_TYPE (lhs), type))
398 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
399 bool is_logical_not = false;
401 /* X == 0 and X != 1 is a logical-not.of X
402 X == 1 and X != 0 is X */
403 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
404 || (cmp_code == NE_EXPR && integer_onep (op2)))
405 is_logical_not = true;
407 if (is_logical_not == false)
409 /* Only for one-bit precision typed X the transformation
410 !X -> ~X is valied. */
411 else if (TYPE_PRECISION (type) == 1)
412 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
414 /* Otherwise we use !X -> X ^ 1. */
416 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
417 type, op1, build_int_cst (type, 1));
423 result = fold_binary_loc (loc, subcode,
424 TREE_TYPE (gimple_assign_lhs (stmt)),
425 gimple_assign_rhs1 (stmt),
426 gimple_assign_rhs2 (stmt));
430 STRIP_USELESS_TYPE_CONVERSION (result);
431 if (valid_gimple_rhs_p (result))
436 case GIMPLE_TERNARY_RHS:
437 /* Try to fold a conditional expression. */
438 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
440 tree op0 = gimple_assign_rhs1 (stmt);
443 location_t cond_loc = gimple_location (stmt);
445 if (COMPARISON_CLASS_P (op0))
447 fold_defer_overflow_warnings ();
448 tem = fold_binary_loc (cond_loc,
449 TREE_CODE (op0), TREE_TYPE (op0),
450 TREE_OPERAND (op0, 0),
451 TREE_OPERAND (op0, 1));
452 /* This is actually a conditional expression, not a GIMPLE
453 conditional statement, however, the valid_gimple_rhs_p
454 test still applies. */
455 set = (tem && is_gimple_condexpr (tem)
456 && valid_gimple_rhs_p (tem));
457 fold_undefer_overflow_warnings (set, stmt, 0);
459 else if (is_gimple_min_invariant (op0))
468 result = fold_build3_loc (cond_loc, COND_EXPR,
469 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
470 gimple_assign_rhs2 (stmt),
471 gimple_assign_rhs3 (stmt));
475 result = fold_ternary_loc (loc, subcode,
476 TREE_TYPE (gimple_assign_lhs (stmt)),
477 gimple_assign_rhs1 (stmt),
478 gimple_assign_rhs2 (stmt),
479 gimple_assign_rhs3 (stmt));
483 STRIP_USELESS_TYPE_CONVERSION (result);
484 if (valid_gimple_rhs_p (result))
489 case GIMPLE_INVALID_RHS:
496 /* Attempt to fold a conditional statement. Return true if any changes were
497 made. We only attempt to fold the condition expression, and do not perform
498 any transformation that would require alteration of the cfg. It is
499 assumed that the operands have been previously folded. */
502 fold_gimple_cond (gimple stmt)
504 tree result = fold_binary_loc (gimple_location (stmt),
505 gimple_cond_code (stmt),
507 gimple_cond_lhs (stmt),
508 gimple_cond_rhs (stmt));
512 STRIP_USELESS_TYPE_CONVERSION (result);
513 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
515 gimple_cond_set_condition_from_tree (stmt, result);
523 /* Convert EXPR into a GIMPLE value suitable for substitution on the
524 RHS of an assignment. Insert the necessary statements before
525 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
526 is replaced. If the call is expected to produces a result, then it
527 is replaced by an assignment of the new RHS to the result variable.
528 If the result is to be ignored, then the call is replaced by a
529 GIMPLE_NOP. A proper VDEF chain is retained by making the first
530 VUSE and the last VDEF of the whole sequence be the same as the replaced
531 statement and using new SSA names for stores in between. */
534 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
537 gimple stmt, new_stmt;
538 gimple_stmt_iterator i;
539 gimple_seq stmts = gimple_seq_alloc();
540 struct gimplify_ctx gctx;
545 stmt = gsi_stmt (*si_p);
547 gcc_assert (is_gimple_call (stmt));
549 push_gimplify_context (&gctx);
550 gctx.into_ssa = gimple_in_ssa_p (cfun);
552 lhs = gimple_call_lhs (stmt);
553 if (lhs == NULL_TREE)
555 gimplify_and_add (expr, &stmts);
556 /* We can end up with folding a memcpy of an empty class assignment
557 which gets optimized away by C++ gimplification. */
558 if (gimple_seq_empty_p (stmts))
560 pop_gimplify_context (NULL);
561 if (gimple_in_ssa_p (cfun))
563 unlink_stmt_vdef (stmt);
566 gsi_remove (si_p, true);
572 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
573 new_stmt = gimple_build_assign (lhs, tmp);
574 i = gsi_last (stmts);
575 gsi_insert_after_without_update (&i, new_stmt,
576 GSI_CONTINUE_LINKING);
579 pop_gimplify_context (NULL);
581 if (gimple_has_location (stmt))
582 annotate_all_with_location (stmts, gimple_location (stmt));
584 /* First iterate over the replacement statements backward, assigning
585 virtual operands to their defining statements. */
587 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
589 new_stmt = gsi_stmt (i);
590 if (gimple_assign_single_p (new_stmt)
591 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
595 vdef = gimple_vdef (stmt);
597 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
598 gimple_set_vdef (new_stmt, vdef);
599 if (TREE_CODE (vdef) == SSA_NAME)
600 SSA_NAME_DEF_STMT (vdef) = new_stmt;
601 laststore = new_stmt;
605 /* Second iterate over the statements forward, assigning virtual
606 operands to their uses. */
608 reaching_vuse = gimple_vuse (stmt);
609 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
611 /* Do not insert the last stmt in this loop but remember it
612 for replacing the original statement. */
615 gsi_insert_before (si_p, last, GSI_NEW_STMT);
618 new_stmt = gsi_stmt (i);
619 /* The replacement can expose previously unreferenced variables. */
620 if (gimple_in_ssa_p (cfun))
621 find_new_referenced_vars (new_stmt);
622 /* If the new statement possibly has a VUSE, update it with exact SSA
623 name we know will reach this one. */
624 if (gimple_has_mem_ops (new_stmt))
625 gimple_set_vuse (new_stmt, reaching_vuse);
626 gimple_set_modified (new_stmt, true);
627 if (gimple_vdef (new_stmt))
628 reaching_vuse = gimple_vdef (new_stmt);
632 /* If the new sequence does not do a store release the virtual
633 definition of the original statement. */
635 && reaching_vuse == gimple_vuse (stmt))
637 tree vdef = gimple_vdef (stmt);
639 && TREE_CODE (vdef) == SSA_NAME)
641 unlink_stmt_vdef (stmt);
642 release_ssa_name (vdef);
646 /* Finally replace rhe original statement with the last. */
647 gsi_replace (si_p, last, false);
650 /* Return the string length, maximum string length or maximum value of
652 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
653 is not NULL and, for TYPE == 0, its value is not equal to the length
654 we determine or if we are unable to determine the length or value,
655 return false. VISITED is a bitmap of visited variables.
656 TYPE is 0 if string length should be returned, 1 for maximum string
657 length and 2 for maximum value ARG can have. */
660 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
665 if (TREE_CODE (arg) != SSA_NAME)
667 if (TREE_CODE (arg) == COND_EXPR)
668 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
669 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
670 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
671 else if (TREE_CODE (arg) == ADDR_EXPR
672 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
673 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
675 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
676 if (TREE_CODE (aop0) == INDIRECT_REF
677 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
678 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
679 length, visited, type);
685 if (TREE_CODE (val) != INTEGER_CST
686 || tree_int_cst_sgn (val) < 0)
690 val = c_strlen (arg, 1);
698 if (TREE_CODE (*length) != INTEGER_CST
699 || TREE_CODE (val) != INTEGER_CST)
702 if (tree_int_cst_lt (*length, val))
706 else if (simple_cst_equal (val, *length) != 1)
714 /* If we were already here, break the infinite cycle. */
715 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
719 def_stmt = SSA_NAME_DEF_STMT (var);
721 switch (gimple_code (def_stmt))
724 /* The RHS of the statement defining VAR must either have a
725 constant length or come from another SSA_NAME with a constant
727 if (gimple_assign_single_p (def_stmt)
728 || gimple_assign_unary_nop_p (def_stmt))
730 tree rhs = gimple_assign_rhs1 (def_stmt);
731 return get_maxval_strlen (rhs, length, visited, type);
737 /* All the arguments of the PHI node must have the same constant
741 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
743 tree arg = gimple_phi_arg (def_stmt, i)->def;
745 /* If this PHI has itself as an argument, we cannot
746 determine the string length of this argument. However,
747 if we can find a constant string length for the other
748 PHI args then we can still be sure that this is a
749 constant string length. So be optimistic and just
750 continue with the next argument. */
751 if (arg == gimple_phi_result (def_stmt))
754 if (!get_maxval_strlen (arg, length, visited, type))
766 /* Fold builtin call in statement STMT. Returns a simplified tree.
767 We may return a non-constant expression, including another call
768 to a different function and with different arguments, e.g.,
769 substituting memcpy for strcpy when the string length is known.
770 Note that some builtins expand into inline code that may not
771 be valid in GIMPLE. Callers must take care. */
774 gimple_fold_builtin (gimple stmt)
782 location_t loc = gimple_location (stmt);
784 gcc_assert (is_gimple_call (stmt));
786 ignore = (gimple_call_lhs (stmt) == NULL);
788 /* First try the generic builtin folder. If that succeeds, return the
790 result = fold_call_stmt (stmt, ignore);
798 /* Ignore MD builtins. */
799 callee = gimple_call_fndecl (stmt);
800 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
803 /* Give up for always_inline inline builtins until they are
805 if (avoid_folding_inline_builtin (callee))
808 /* If the builtin could not be folded, and it has no argument list,
810 nargs = gimple_call_num_args (stmt);
814 /* Limit the work only for builtins we know how to simplify. */
815 switch (DECL_FUNCTION_CODE (callee))
817 case BUILT_IN_STRLEN:
819 case BUILT_IN_FPUTS_UNLOCKED:
823 case BUILT_IN_STRCPY:
824 case BUILT_IN_STRNCPY:
828 case BUILT_IN_MEMCPY_CHK:
829 case BUILT_IN_MEMPCPY_CHK:
830 case BUILT_IN_MEMMOVE_CHK:
831 case BUILT_IN_MEMSET_CHK:
832 case BUILT_IN_STRNCPY_CHK:
836 case BUILT_IN_STRCPY_CHK:
837 case BUILT_IN_STPCPY_CHK:
841 case BUILT_IN_SNPRINTF_CHK:
842 case BUILT_IN_VSNPRINTF_CHK:
850 if (arg_idx >= nargs)
853 /* Try to use the dataflow information gathered by the CCP process. */
854 visited = BITMAP_ALLOC (NULL);
855 bitmap_clear (visited);
857 memset (val, 0, sizeof (val));
858 a = gimple_call_arg (stmt, arg_idx);
859 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
860 val[arg_idx] = NULL_TREE;
862 BITMAP_FREE (visited);
865 switch (DECL_FUNCTION_CODE (callee))
867 case BUILT_IN_STRLEN:
868 if (val[0] && nargs == 1)
871 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
873 /* If the result is not a valid gimple value, or not a cast
874 of a valid gimple value, then we cannot use the result. */
875 if (is_gimple_val (new_val)
876 || (CONVERT_EXPR_P (new_val)
877 && is_gimple_val (TREE_OPERAND (new_val, 0))))
882 case BUILT_IN_STRCPY:
883 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
884 result = fold_builtin_strcpy (loc, callee,
885 gimple_call_arg (stmt, 0),
886 gimple_call_arg (stmt, 1),
890 case BUILT_IN_STRNCPY:
891 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
892 result = fold_builtin_strncpy (loc, callee,
893 gimple_call_arg (stmt, 0),
894 gimple_call_arg (stmt, 1),
895 gimple_call_arg (stmt, 2),
901 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
902 gimple_call_arg (stmt, 1),
903 ignore, false, val[0]);
906 case BUILT_IN_FPUTS_UNLOCKED:
908 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
909 gimple_call_arg (stmt, 1),
910 ignore, true, val[0]);
913 case BUILT_IN_MEMCPY_CHK:
914 case BUILT_IN_MEMPCPY_CHK:
915 case BUILT_IN_MEMMOVE_CHK:
916 case BUILT_IN_MEMSET_CHK:
917 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
918 result = fold_builtin_memory_chk (loc, callee,
919 gimple_call_arg (stmt, 0),
920 gimple_call_arg (stmt, 1),
921 gimple_call_arg (stmt, 2),
922 gimple_call_arg (stmt, 3),
924 DECL_FUNCTION_CODE (callee));
927 case BUILT_IN_STRCPY_CHK:
928 case BUILT_IN_STPCPY_CHK:
929 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
930 result = fold_builtin_stxcpy_chk (loc, callee,
931 gimple_call_arg (stmt, 0),
932 gimple_call_arg (stmt, 1),
933 gimple_call_arg (stmt, 2),
935 DECL_FUNCTION_CODE (callee));
938 case BUILT_IN_STRNCPY_CHK:
939 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
940 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
941 gimple_call_arg (stmt, 1),
942 gimple_call_arg (stmt, 2),
943 gimple_call_arg (stmt, 3),
947 case BUILT_IN_SNPRINTF_CHK:
948 case BUILT_IN_VSNPRINTF_CHK:
949 if (val[1] && is_gimple_val (val[1]))
950 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
951 DECL_FUNCTION_CODE (callee));
958 if (result && ignore)
959 result = fold_ignored_result (result);
963 /* Generate code adjusting the first parameter of a call statement determined
967 gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
969 gimple call_stmt = gsi_stmt (*gsi);
973 delta = convert_to_ptrofftype (delta);
974 gcc_assert (gimple_call_num_args (call_stmt) >= 1);
975 parm = gimple_call_arg (call_stmt, 0);
976 gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
977 tmp = create_tmp_var (TREE_TYPE (parm), NULL);
978 add_referenced_var (tmp);
980 tmp = make_ssa_name (tmp, NULL);
981 new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
982 SSA_NAME_DEF_STMT (tmp) = new_stmt;
983 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
984 gimple_call_set_arg (call_stmt, 0, tmp);
987 /* Return a binfo to be used for devirtualization of calls based on an object
988 represented by a declaration (i.e. a global or automatically allocated one)
989 or NULL if it cannot be found or is not safe. CST is expected to be an
990 ADDR_EXPR of such object or the function will return NULL. Currently it is
991 safe to use such binfo only if it has no base binfo (i.e. no ancestors). */
994 gimple_extract_devirt_binfo_from_cst (tree cst)
996 HOST_WIDE_INT offset, size, max_size;
997 tree base, type, expected_type, binfo;
998 bool last_artificial = false;
1000 if (!flag_devirtualize
1001 || TREE_CODE (cst) != ADDR_EXPR
1002 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1005 cst = TREE_OPERAND (cst, 0);
1006 expected_type = TREE_TYPE (cst);
1007 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1008 type = TREE_TYPE (base);
1012 || TREE_CODE (type) != RECORD_TYPE)
1015 /* Find the sub-object the constant actually refers to and mark whether it is
1016 an artificial one (as opposed to a user-defined one). */
1019 HOST_WIDE_INT pos, size;
1022 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
1027 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1029 if (TREE_CODE (fld) != FIELD_DECL)
1032 pos = int_bit_position (fld);
1033 size = tree_low_cst (DECL_SIZE (fld), 1);
1034 if (pos <= offset && (pos + size) > offset)
1037 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1040 last_artificial = DECL_ARTIFICIAL (fld);
1041 type = TREE_TYPE (fld);
1044 /* Artifical sub-objects are ancestors, we do not want to use them for
1045 devirtualization, at least not here. */
1046 if (last_artificial)
1048 binfo = TYPE_BINFO (type);
1049 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1055 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1056 The statement may be replaced by another statement, e.g., if the call
1057 simplifies to a constant value. Return true if any changes were made.
1058 It is assumed that the operands have been previously folded. */
1061 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1063 gimple stmt = gsi_stmt (*gsi);
1065 bool changed = false;
1068 /* Fold *& in call arguments. */
1069 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1070 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1072 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1075 gimple_call_set_arg (stmt, i, tmp);
1080 /* Check for virtual calls that became direct calls. */
1081 callee = gimple_call_fn (stmt);
1082 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1084 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1086 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1091 tree obj = OBJ_TYPE_REF_OBJECT (callee);
1092 tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
1096 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1097 tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1100 gimple_call_set_fndecl (stmt, fndecl);
1107 /* Check whether propagating into the function address made the
1108 call direct, and thus possibly non-inlineable.
1109 ??? This asks for a more conservative setting of the non-inlinable
1110 flag, namely true for all indirect calls. But that would require
1111 that we can re-compute the flag conservatively, thus it isn't
1112 ever initialized from something else than return/argument type
1114 callee = gimple_call_fndecl (stmt);
1116 && !gimple_check_call_matching_types (stmt, callee))
1117 gimple_call_set_cannot_inline (stmt, true);
1122 /* Check for builtins that CCP can handle using information not
1123 available in the generic fold routines. */
1124 if (callee && DECL_BUILT_IN (callee))
1126 tree result = gimple_fold_builtin (stmt);
1129 if (!update_call_from_tree (gsi, result))
1130 gimplify_and_update_call_from_tree (gsi, result);
1138 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1139 distinguishes both cases. */
1142 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1144 bool changed = false;
1145 gimple stmt = gsi_stmt (*gsi);
1147 gimple_stmt_iterator gsinext = *gsi;
1150 gsi_next (&gsinext);
1151 next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
1153 /* Fold the main computation performed by the statement. */
1154 switch (gimple_code (stmt))
1158 unsigned old_num_ops = gimple_num_ops (stmt);
1159 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1160 tree lhs = gimple_assign_lhs (stmt);
1162 /* First canonicalize operand order. This avoids building new
1163 trees if this is the only thing fold would later do. */
1164 if ((commutative_tree_code (subcode)
1165 || commutative_ternary_tree_code (subcode))
1166 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1167 gimple_assign_rhs2 (stmt), false))
1169 tree tem = gimple_assign_rhs1 (stmt);
1170 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1171 gimple_assign_set_rhs2 (stmt, tem);
1174 new_rhs = fold_gimple_assign (gsi);
1176 && !useless_type_conversion_p (TREE_TYPE (lhs),
1177 TREE_TYPE (new_rhs)))
1178 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1181 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1183 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1190 changed |= fold_gimple_cond (stmt);
1194 changed |= gimple_fold_call (gsi, inplace);
1198 /* Fold *& in asm operands. */
1201 const char **oconstraints;
1202 const char *constraint;
1203 bool allows_mem, allows_reg;
1205 noutputs = gimple_asm_noutputs (stmt);
1206 oconstraints = XALLOCAVEC (const char *, noutputs);
1208 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1210 tree link = gimple_asm_output_op (stmt, i);
1211 tree op = TREE_VALUE (link);
1213 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1214 if (REFERENCE_CLASS_P (op)
1215 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1217 TREE_VALUE (link) = op;
1221 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1223 tree link = gimple_asm_input_op (stmt, i);
1224 tree op = TREE_VALUE (link);
1226 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1227 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1228 oconstraints, &allows_mem, &allows_reg);
1229 if (REFERENCE_CLASS_P (op)
1230 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1233 TREE_VALUE (link) = op;
1241 if (gimple_debug_bind_p (stmt))
1243 tree val = gimple_debug_bind_get_value (stmt);
1245 && REFERENCE_CLASS_P (val))
1247 tree tem = maybe_fold_reference (val, false);
1250 gimple_debug_bind_set_value (stmt, tem);
1260 /* If stmt folds into nothing and it was the last stmt in a bb,
1261 don't call gsi_stmt. */
1262 if (gsi_end_p (*gsi))
1264 gcc_assert (next_stmt == NULL);
1268 stmt = gsi_stmt (*gsi);
1270 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1271 as we'd changing the next stmt. */
1272 if (gimple_has_lhs (stmt) && stmt != next_stmt)
1274 tree lhs = gimple_get_lhs (stmt);
1275 if (lhs && REFERENCE_CLASS_P (lhs))
1277 tree new_lhs = maybe_fold_reference (lhs, true);
1280 gimple_set_lhs (stmt, new_lhs);
1289 /* Fold the statement pointed to by GSI. In some cases, this function may
1290 replace the whole statement with a new one. Returns true iff folding
1292 The statement pointed to by GSI should be in valid gimple form but may
1293 be in unfolded state as resulting from for example constant propagation
1294 which can produce *&x = 0. */
1297 fold_stmt (gimple_stmt_iterator *gsi)
1299 return fold_stmt_1 (gsi, false);
1302 /* Perform the minimal folding on statement *GSI. Only operations like
1303 *&x created by constant propagation are handled. The statement cannot
1304 be replaced with a new one. Return true if the statement was
1305 changed, false otherwise.
1306 The statement *GSI should be in valid gimple form but may
1307 be in unfolded state as resulting from for example constant propagation
1308 which can produce *&x = 0. */
1311 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1313 gimple stmt = gsi_stmt (*gsi);
1314 bool changed = fold_stmt_1 (gsi, true);
1315 gcc_assert (gsi_stmt (*gsi) == stmt);
1319 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1320 if EXPR is null or we don't know how.
1321 If non-null, the result always has boolean type. */
1324 canonicalize_bool (tree expr, bool invert)
1330 if (integer_nonzerop (expr))
1331 return boolean_false_node;
1332 else if (integer_zerop (expr))
1333 return boolean_true_node;
1334 else if (TREE_CODE (expr) == SSA_NAME)
1335 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1336 build_int_cst (TREE_TYPE (expr), 0));
1337 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1338 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1340 TREE_OPERAND (expr, 0),
1341 TREE_OPERAND (expr, 1));
1347 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1349 if (integer_nonzerop (expr))
1350 return boolean_true_node;
1351 else if (integer_zerop (expr))
1352 return boolean_false_node;
1353 else if (TREE_CODE (expr) == SSA_NAME)
1354 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1355 build_int_cst (TREE_TYPE (expr), 0));
1356 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1357 return fold_build2 (TREE_CODE (expr),
1359 TREE_OPERAND (expr, 0),
1360 TREE_OPERAND (expr, 1));
1366 /* Check to see if a boolean expression EXPR is logically equivalent to the
1367 comparison (OP1 CODE OP2). Check for various identities involving
1371 same_bool_comparison_p (const_tree expr, enum tree_code code,
1372 const_tree op1, const_tree op2)
1376 /* The obvious case. */
1377 if (TREE_CODE (expr) == code
1378 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1379 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1382 /* Check for comparing (name, name != 0) and the case where expr
1383 is an SSA_NAME with a definition matching the comparison. */
1384 if (TREE_CODE (expr) == SSA_NAME
1385 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1387 if (operand_equal_p (expr, op1, 0))
1388 return ((code == NE_EXPR && integer_zerop (op2))
1389 || (code == EQ_EXPR && integer_nonzerop (op2)));
1390 s = SSA_NAME_DEF_STMT (expr);
1391 if (is_gimple_assign (s)
1392 && gimple_assign_rhs_code (s) == code
1393 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1394 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1398 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1399 of name is a comparison, recurse. */
1400 if (TREE_CODE (op1) == SSA_NAME
1401 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1403 s = SSA_NAME_DEF_STMT (op1);
1404 if (is_gimple_assign (s)
1405 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1407 enum tree_code c = gimple_assign_rhs_code (s);
1408 if ((c == NE_EXPR && integer_zerop (op2))
1409 || (c == EQ_EXPR && integer_nonzerop (op2)))
1410 return same_bool_comparison_p (expr, c,
1411 gimple_assign_rhs1 (s),
1412 gimple_assign_rhs2 (s));
1413 if ((c == EQ_EXPR && integer_zerop (op2))
1414 || (c == NE_EXPR && integer_nonzerop (op2)))
1415 return same_bool_comparison_p (expr,
1416 invert_tree_comparison (c, false),
1417 gimple_assign_rhs1 (s),
1418 gimple_assign_rhs2 (s));
1424 /* Check to see if two boolean expressions OP1 and OP2 are logically
1428 same_bool_result_p (const_tree op1, const_tree op2)
1430 /* Simple cases first. */
1431 if (operand_equal_p (op1, op2, 0))
1434 /* Check the cases where at least one of the operands is a comparison.
1435 These are a bit smarter than operand_equal_p in that they apply some
1436 identifies on SSA_NAMEs. */
1437 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1438 && same_bool_comparison_p (op1, TREE_CODE (op2),
1439 TREE_OPERAND (op2, 0),
1440 TREE_OPERAND (op2, 1)))
1442 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1443 && same_bool_comparison_p (op2, TREE_CODE (op1),
1444 TREE_OPERAND (op1, 0),
1445 TREE_OPERAND (op1, 1)))
1452 /* Forward declarations for some mutually recursive functions. */
1455 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1456 enum tree_code code2, tree op2a, tree op2b);
1458 and_var_with_comparison (tree var, bool invert,
1459 enum tree_code code2, tree op2a, tree op2b);
1461 and_var_with_comparison_1 (gimple stmt,
1462 enum tree_code code2, tree op2a, tree op2b);
1464 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1465 enum tree_code code2, tree op2a, tree op2b);
1467 or_var_with_comparison (tree var, bool invert,
1468 enum tree_code code2, tree op2a, tree op2b);
1470 or_var_with_comparison_1 (gimple stmt,
1471 enum tree_code code2, tree op2a, tree op2b);
1473 /* Helper function for and_comparisons_1: try to simplify the AND of the
1474 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1475 If INVERT is true, invert the value of the VAR before doing the AND.
1476 Return NULL_EXPR if we can't simplify this to a single expression. */
1479 and_var_with_comparison (tree var, bool invert,
1480 enum tree_code code2, tree op2a, tree op2b)
1483 gimple stmt = SSA_NAME_DEF_STMT (var);
1485 /* We can only deal with variables whose definitions are assignments. */
1486 if (!is_gimple_assign (stmt))
1489 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1490 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1491 Then we only have to consider the simpler non-inverted cases. */
1493 t = or_var_with_comparison_1 (stmt,
1494 invert_tree_comparison (code2, false),
1497 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1498 return canonicalize_bool (t, invert);
1501 /* Try to simplify the AND of the ssa variable defined by the assignment
1502 STMT with the comparison specified by (OP2A CODE2 OP2B).
1503 Return NULL_EXPR if we can't simplify this to a single expression. */
1506 and_var_with_comparison_1 (gimple stmt,
1507 enum tree_code code2, tree op2a, tree op2b)
1509 tree var = gimple_assign_lhs (stmt);
1510 tree true_test_var = NULL_TREE;
1511 tree false_test_var = NULL_TREE;
1512 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1514 /* Check for identities like (var AND (var == 0)) => false. */
1515 if (TREE_CODE (op2a) == SSA_NAME
1516 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1518 if ((code2 == NE_EXPR && integer_zerop (op2b))
1519 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1521 true_test_var = op2a;
1522 if (var == true_test_var)
1525 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1526 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1528 false_test_var = op2a;
1529 if (var == false_test_var)
1530 return boolean_false_node;
1534 /* If the definition is a comparison, recurse on it. */
1535 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1537 tree t = and_comparisons_1 (innercode,
1538 gimple_assign_rhs1 (stmt),
1539 gimple_assign_rhs2 (stmt),
1547 /* If the definition is an AND or OR expression, we may be able to
1548 simplify by reassociating. */
1549 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1550 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1552 tree inner1 = gimple_assign_rhs1 (stmt);
1553 tree inner2 = gimple_assign_rhs2 (stmt);
1556 tree partial = NULL_TREE;
1557 bool is_and = (innercode == BIT_AND_EXPR);
1559 /* Check for boolean identities that don't require recursive examination
1561 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1562 inner1 AND (inner1 OR inner2) => inner1
1563 !inner1 AND (inner1 AND inner2) => false
1564 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1565 Likewise for similar cases involving inner2. */
1566 if (inner1 == true_test_var)
1567 return (is_and ? var : inner1);
1568 else if (inner2 == true_test_var)
1569 return (is_and ? var : inner2);
1570 else if (inner1 == false_test_var)
1572 ? boolean_false_node
1573 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1574 else if (inner2 == false_test_var)
1576 ? boolean_false_node
1577 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1579 /* Next, redistribute/reassociate the AND across the inner tests.
1580 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1581 if (TREE_CODE (inner1) == SSA_NAME
1582 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1583 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1584 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1585 gimple_assign_rhs1 (s),
1586 gimple_assign_rhs2 (s),
1587 code2, op2a, op2b)))
1589 /* Handle the AND case, where we are reassociating:
1590 (inner1 AND inner2) AND (op2a code2 op2b)
1592 If the partial result t is a constant, we win. Otherwise
1593 continue on to try reassociating with the other inner test. */
1596 if (integer_onep (t))
1598 else if (integer_zerop (t))
1599 return boolean_false_node;
1602 /* Handle the OR case, where we are redistributing:
1603 (inner1 OR inner2) AND (op2a code2 op2b)
1604 => (t OR (inner2 AND (op2a code2 op2b))) */
1605 else if (integer_onep (t))
1606 return boolean_true_node;
1608 /* Save partial result for later. */
1612 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1613 if (TREE_CODE (inner2) == SSA_NAME
1614 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1615 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1616 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1617 gimple_assign_rhs1 (s),
1618 gimple_assign_rhs2 (s),
1619 code2, op2a, op2b)))
1621 /* Handle the AND case, where we are reassociating:
1622 (inner1 AND inner2) AND (op2a code2 op2b)
1623 => (inner1 AND t) */
1626 if (integer_onep (t))
1628 else if (integer_zerop (t))
1629 return boolean_false_node;
1630 /* If both are the same, we can apply the identity
1632 else if (partial && same_bool_result_p (t, partial))
1636 /* Handle the OR case. where we are redistributing:
1637 (inner1 OR inner2) AND (op2a code2 op2b)
1638 => (t OR (inner1 AND (op2a code2 op2b)))
1639 => (t OR partial) */
1642 if (integer_onep (t))
1643 return boolean_true_node;
1646 /* We already got a simplification for the other
1647 operand to the redistributed OR expression. The
1648 interesting case is when at least one is false.
1649 Or, if both are the same, we can apply the identity
1651 if (integer_zerop (partial))
1653 else if (integer_zerop (t))
1655 else if (same_bool_result_p (t, partial))
1664 /* Try to simplify the AND of two comparisons defined by
1665 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1666 If this can be done without constructing an intermediate value,
1667 return the resulting tree; otherwise NULL_TREE is returned.
1668 This function is deliberately asymmetric as it recurses on SSA_DEFs
1669 in the first comparison but not the second. */
1672 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1673 enum tree_code code2, tree op2a, tree op2b)
1675 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1676 if (operand_equal_p (op1a, op2a, 0)
1677 && operand_equal_p (op1b, op2b, 0))
1679 /* Result will be either NULL_TREE, or a combined comparison. */
1680 tree t = combine_comparisons (UNKNOWN_LOCATION,
1681 TRUTH_ANDIF_EXPR, code1, code2,
1682 boolean_type_node, op1a, op1b);
1687 /* Likewise the swapped case of the above. */
1688 if (operand_equal_p (op1a, op2b, 0)
1689 && operand_equal_p (op1b, op2a, 0))
1691 /* Result will be either NULL_TREE, or a combined comparison. */
1692 tree t = combine_comparisons (UNKNOWN_LOCATION,
1693 TRUTH_ANDIF_EXPR, code1,
1694 swap_tree_comparison (code2),
1695 boolean_type_node, op1a, op1b);
1700 /* If both comparisons are of the same value against constants, we might
1701 be able to merge them. */
1702 if (operand_equal_p (op1a, op2a, 0)
1703 && TREE_CODE (op1b) == INTEGER_CST
1704 && TREE_CODE (op2b) == INTEGER_CST)
1706 int cmp = tree_int_cst_compare (op1b, op2b);
1708 /* If we have (op1a == op1b), we should either be able to
1709 return that or FALSE, depending on whether the constant op1b
1710 also satisfies the other comparison against op2b. */
1711 if (code1 == EQ_EXPR)
1717 case EQ_EXPR: val = (cmp == 0); break;
1718 case NE_EXPR: val = (cmp != 0); break;
1719 case LT_EXPR: val = (cmp < 0); break;
1720 case GT_EXPR: val = (cmp > 0); break;
1721 case LE_EXPR: val = (cmp <= 0); break;
1722 case GE_EXPR: val = (cmp >= 0); break;
1723 default: done = false;
1728 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1730 return boolean_false_node;
1733 /* Likewise if the second comparison is an == comparison. */
1734 else if (code2 == EQ_EXPR)
1740 case EQ_EXPR: val = (cmp == 0); break;
1741 case NE_EXPR: val = (cmp != 0); break;
1742 case LT_EXPR: val = (cmp > 0); break;
1743 case GT_EXPR: val = (cmp < 0); break;
1744 case LE_EXPR: val = (cmp >= 0); break;
1745 case GE_EXPR: val = (cmp <= 0); break;
1746 default: done = false;
1751 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1753 return boolean_false_node;
1757 /* Same business with inequality tests. */
1758 else if (code1 == NE_EXPR)
1763 case EQ_EXPR: val = (cmp != 0); break;
1764 case NE_EXPR: val = (cmp == 0); break;
1765 case LT_EXPR: val = (cmp >= 0); break;
1766 case GT_EXPR: val = (cmp <= 0); break;
1767 case LE_EXPR: val = (cmp > 0); break;
1768 case GE_EXPR: val = (cmp < 0); break;
1773 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1775 else if (code2 == NE_EXPR)
1780 case EQ_EXPR: val = (cmp == 0); break;
1781 case NE_EXPR: val = (cmp != 0); break;
1782 case LT_EXPR: val = (cmp <= 0); break;
1783 case GT_EXPR: val = (cmp >= 0); break;
1784 case LE_EXPR: val = (cmp < 0); break;
1785 case GE_EXPR: val = (cmp > 0); break;
1790 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1793 /* Chose the more restrictive of two < or <= comparisons. */
1794 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1795 && (code2 == LT_EXPR || code2 == LE_EXPR))
1797 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1798 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1800 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1803 /* Likewise chose the more restrictive of two > or >= comparisons. */
1804 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1805 && (code2 == GT_EXPR || code2 == GE_EXPR))
1807 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1808 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1810 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1813 /* Check for singleton ranges. */
1815 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1816 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1817 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1819 /* Check for disjoint ranges. */
1821 && (code1 == LT_EXPR || code1 == LE_EXPR)
1822 && (code2 == GT_EXPR || code2 == GE_EXPR))
1823 return boolean_false_node;
1825 && (code1 == GT_EXPR || code1 == GE_EXPR)
1826 && (code2 == LT_EXPR || code2 == LE_EXPR))
1827 return boolean_false_node;
1830 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1831 NAME's definition is a truth value. See if there are any simplifications
1832 that can be done against the NAME's definition. */
1833 if (TREE_CODE (op1a) == SSA_NAME
1834 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1835 && (integer_zerop (op1b) || integer_onep (op1b)))
1837 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1838 || (code1 == NE_EXPR && integer_onep (op1b)));
1839 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1840 switch (gimple_code (stmt))
1843 /* Try to simplify by copy-propagating the definition. */
1844 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1847 /* If every argument to the PHI produces the same result when
1848 ANDed with the second comparison, we win.
1849 Do not do this unless the type is bool since we need a bool
1850 result here anyway. */
1851 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1853 tree result = NULL_TREE;
1855 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1857 tree arg = gimple_phi_arg_def (stmt, i);
1859 /* If this PHI has itself as an argument, ignore it.
1860 If all the other args produce the same result,
1862 if (arg == gimple_phi_result (stmt))
1864 else if (TREE_CODE (arg) == INTEGER_CST)
1866 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1869 result = boolean_false_node;
1870 else if (!integer_zerop (result))
1874 result = fold_build2 (code2, boolean_type_node,
1876 else if (!same_bool_comparison_p (result,
1880 else if (TREE_CODE (arg) == SSA_NAME
1881 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1884 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1885 /* In simple cases we can look through PHI nodes,
1886 but we have to be careful with loops.
1888 if (! dom_info_available_p (CDI_DOMINATORS)
1889 || gimple_bb (def_stmt) == gimple_bb (stmt)
1890 || dominated_by_p (CDI_DOMINATORS,
1891 gimple_bb (def_stmt),
1894 temp = and_var_with_comparison (arg, invert, code2,
1900 else if (!same_bool_result_p (result, temp))
1916 /* Try to simplify the AND of two comparisons, specified by
1917 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1918 If this can be simplified to a single expression (without requiring
1919 introducing more SSA variables to hold intermediate values),
1920 return the resulting tree. Otherwise return NULL_TREE.
1921 If the result expression is non-null, it has boolean type. */
1924 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1925 enum tree_code code2, tree op2a, tree op2b)
1927 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1931 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1934 /* Helper function for or_comparisons_1: try to simplify the OR of the
1935 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1936 If INVERT is true, invert the value of VAR before doing the OR.
1937 Return NULL_EXPR if we can't simplify this to a single expression. */
1940 or_var_with_comparison (tree var, bool invert,
1941 enum tree_code code2, tree op2a, tree op2b)
1944 gimple stmt = SSA_NAME_DEF_STMT (var);
1946 /* We can only deal with variables whose definitions are assignments. */
1947 if (!is_gimple_assign (stmt))
1950 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1951 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1952 Then we only have to consider the simpler non-inverted cases. */
1954 t = and_var_with_comparison_1 (stmt,
1955 invert_tree_comparison (code2, false),
1958 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1959 return canonicalize_bool (t, invert);
1962 /* Try to simplify the OR of the ssa variable defined by the assignment
1963 STMT with the comparison specified by (OP2A CODE2 OP2B).
1964 Return NULL_EXPR if we can't simplify this to a single expression. */
1967 or_var_with_comparison_1 (gimple stmt,
1968 enum tree_code code2, tree op2a, tree op2b)
1970 tree var = gimple_assign_lhs (stmt);
1971 tree true_test_var = NULL_TREE;
1972 tree false_test_var = NULL_TREE;
1973 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1975 /* Check for identities like (var OR (var != 0)) => true . */
1976 if (TREE_CODE (op2a) == SSA_NAME
1977 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1979 if ((code2 == NE_EXPR && integer_zerop (op2b))
1980 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1982 true_test_var = op2a;
1983 if (var == true_test_var)
1986 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1987 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1989 false_test_var = op2a;
1990 if (var == false_test_var)
1991 return boolean_true_node;
1995 /* If the definition is a comparison, recurse on it. */
1996 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1998 tree t = or_comparisons_1 (innercode,
1999 gimple_assign_rhs1 (stmt),
2000 gimple_assign_rhs2 (stmt),
2008 /* If the definition is an AND or OR expression, we may be able to
2009 simplify by reassociating. */
2010 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2011 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2013 tree inner1 = gimple_assign_rhs1 (stmt);
2014 tree inner2 = gimple_assign_rhs2 (stmt);
2017 tree partial = NULL_TREE;
2018 bool is_or = (innercode == BIT_IOR_EXPR);
2020 /* Check for boolean identities that don't require recursive examination
2022 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2023 inner1 OR (inner1 AND inner2) => inner1
2024 !inner1 OR (inner1 OR inner2) => true
2025 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2027 if (inner1 == true_test_var)
2028 return (is_or ? var : inner1);
2029 else if (inner2 == true_test_var)
2030 return (is_or ? var : inner2);
2031 else if (inner1 == false_test_var)
2034 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2035 else if (inner2 == false_test_var)
2038 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2040 /* Next, redistribute/reassociate the OR across the inner tests.
2041 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2042 if (TREE_CODE (inner1) == SSA_NAME
2043 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2044 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2045 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2046 gimple_assign_rhs1 (s),
2047 gimple_assign_rhs2 (s),
2048 code2, op2a, op2b)))
2050 /* Handle the OR case, where we are reassociating:
2051 (inner1 OR inner2) OR (op2a code2 op2b)
2053 If the partial result t is a constant, we win. Otherwise
2054 continue on to try reassociating with the other inner test. */
2057 if (integer_onep (t))
2058 return boolean_true_node;
2059 else if (integer_zerop (t))
2063 /* Handle the AND case, where we are redistributing:
2064 (inner1 AND inner2) OR (op2a code2 op2b)
2065 => (t AND (inner2 OR (op2a code op2b))) */
2066 else if (integer_zerop (t))
2067 return boolean_false_node;
2069 /* Save partial result for later. */
2073 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2074 if (TREE_CODE (inner2) == SSA_NAME
2075 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2076 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2077 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2078 gimple_assign_rhs1 (s),
2079 gimple_assign_rhs2 (s),
2080 code2, op2a, op2b)))
2082 /* Handle the OR case, where we are reassociating:
2083 (inner1 OR inner2) OR (op2a code2 op2b)
2085 => (t OR partial) */
2088 if (integer_zerop (t))
2090 else if (integer_onep (t))
2091 return boolean_true_node;
2092 /* If both are the same, we can apply the identity
2094 else if (partial && same_bool_result_p (t, partial))
2098 /* Handle the AND case, where we are redistributing:
2099 (inner1 AND inner2) OR (op2a code2 op2b)
2100 => (t AND (inner1 OR (op2a code2 op2b)))
2101 => (t AND partial) */
2104 if (integer_zerop (t))
2105 return boolean_false_node;
2108 /* We already got a simplification for the other
2109 operand to the redistributed AND expression. The
2110 interesting case is when at least one is true.
2111 Or, if both are the same, we can apply the identity
2113 if (integer_onep (partial))
2115 else if (integer_onep (t))
2117 else if (same_bool_result_p (t, partial))
2126 /* Try to simplify the OR of two comparisons defined by
2127 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2128 If this can be done without constructing an intermediate value,
2129 return the resulting tree; otherwise NULL_TREE is returned.
2130 This function is deliberately asymmetric as it recurses on SSA_DEFs
2131 in the first comparison but not the second. */
2134 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2135 enum tree_code code2, tree op2a, tree op2b)
2137 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2138 if (operand_equal_p (op1a, op2a, 0)
2139 && operand_equal_p (op1b, op2b, 0))
2141 /* Result will be either NULL_TREE, or a combined comparison. */
2142 tree t = combine_comparisons (UNKNOWN_LOCATION,
2143 TRUTH_ORIF_EXPR, code1, code2,
2144 boolean_type_node, op1a, op1b);
2149 /* Likewise the swapped case of the above. */
2150 if (operand_equal_p (op1a, op2b, 0)
2151 && operand_equal_p (op1b, op2a, 0))
2153 /* Result will be either NULL_TREE, or a combined comparison. */
2154 tree t = combine_comparisons (UNKNOWN_LOCATION,
2155 TRUTH_ORIF_EXPR, code1,
2156 swap_tree_comparison (code2),
2157 boolean_type_node, op1a, op1b);
2162 /* If both comparisons are of the same value against constants, we might
2163 be able to merge them. */
2164 if (operand_equal_p (op1a, op2a, 0)
2165 && TREE_CODE (op1b) == INTEGER_CST
2166 && TREE_CODE (op2b) == INTEGER_CST)
2168 int cmp = tree_int_cst_compare (op1b, op2b);
2170 /* If we have (op1a != op1b), we should either be able to
2171 return that or TRUE, depending on whether the constant op1b
2172 also satisfies the other comparison against op2b. */
2173 if (code1 == NE_EXPR)
2179 case EQ_EXPR: val = (cmp == 0); break;
2180 case NE_EXPR: val = (cmp != 0); break;
2181 case LT_EXPR: val = (cmp < 0); break;
2182 case GT_EXPR: val = (cmp > 0); break;
2183 case LE_EXPR: val = (cmp <= 0); break;
2184 case GE_EXPR: val = (cmp >= 0); break;
2185 default: done = false;
2190 return boolean_true_node;
2192 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2195 /* Likewise if the second comparison is a != comparison. */
2196 else if (code2 == NE_EXPR)
2202 case EQ_EXPR: val = (cmp == 0); break;
2203 case NE_EXPR: val = (cmp != 0); break;
2204 case LT_EXPR: val = (cmp > 0); break;
2205 case GT_EXPR: val = (cmp < 0); break;
2206 case LE_EXPR: val = (cmp >= 0); break;
2207 case GE_EXPR: val = (cmp <= 0); break;
2208 default: done = false;
2213 return boolean_true_node;
2215 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2219 /* See if an equality test is redundant with the other comparison. */
2220 else if (code1 == EQ_EXPR)
2225 case EQ_EXPR: val = (cmp == 0); break;
2226 case NE_EXPR: val = (cmp != 0); break;
2227 case LT_EXPR: val = (cmp < 0); break;
2228 case GT_EXPR: val = (cmp > 0); break;
2229 case LE_EXPR: val = (cmp <= 0); break;
2230 case GE_EXPR: val = (cmp >= 0); break;
2235 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2237 else if (code2 == EQ_EXPR)
2242 case EQ_EXPR: val = (cmp == 0); break;
2243 case NE_EXPR: val = (cmp != 0); break;
2244 case LT_EXPR: val = (cmp > 0); break;
2245 case GT_EXPR: val = (cmp < 0); break;
2246 case LE_EXPR: val = (cmp >= 0); break;
2247 case GE_EXPR: val = (cmp <= 0); break;
2252 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2255 /* Chose the less restrictive of two < or <= comparisons. */
2256 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2257 && (code2 == LT_EXPR || code2 == LE_EXPR))
2259 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2260 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2262 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2265 /* Likewise chose the less restrictive of two > or >= comparisons. */
2266 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2267 && (code2 == GT_EXPR || code2 == GE_EXPR))
2269 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2270 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2272 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2275 /* Check for singleton ranges. */
2277 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2278 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2279 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2281 /* Check for less/greater pairs that don't restrict the range at all. */
2283 && (code1 == LT_EXPR || code1 == LE_EXPR)
2284 && (code2 == GT_EXPR || code2 == GE_EXPR))
2285 return boolean_true_node;
2287 && (code1 == GT_EXPR || code1 == GE_EXPR)
2288 && (code2 == LT_EXPR || code2 == LE_EXPR))
2289 return boolean_true_node;
2292 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2293 NAME's definition is a truth value. See if there are any simplifications
2294 that can be done against the NAME's definition. */
2295 if (TREE_CODE (op1a) == SSA_NAME
2296 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2297 && (integer_zerop (op1b) || integer_onep (op1b)))
2299 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2300 || (code1 == NE_EXPR && integer_onep (op1b)));
2301 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2302 switch (gimple_code (stmt))
2305 /* Try to simplify by copy-propagating the definition. */
2306 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2309 /* If every argument to the PHI produces the same result when
2310 ORed with the second comparison, we win.
2311 Do not do this unless the type is bool since we need a bool
2312 result here anyway. */
2313 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2315 tree result = NULL_TREE;
2317 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2319 tree arg = gimple_phi_arg_def (stmt, i);
2321 /* If this PHI has itself as an argument, ignore it.
2322 If all the other args produce the same result,
2324 if (arg == gimple_phi_result (stmt))
2326 else if (TREE_CODE (arg) == INTEGER_CST)
2328 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2331 result = boolean_true_node;
2332 else if (!integer_onep (result))
2336 result = fold_build2 (code2, boolean_type_node,
2338 else if (!same_bool_comparison_p (result,
2342 else if (TREE_CODE (arg) == SSA_NAME
2343 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2346 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2347 /* In simple cases we can look through PHI nodes,
2348 but we have to be careful with loops.
2350 if (! dom_info_available_p (CDI_DOMINATORS)
2351 || gimple_bb (def_stmt) == gimple_bb (stmt)
2352 || dominated_by_p (CDI_DOMINATORS,
2353 gimple_bb (def_stmt),
2356 temp = or_var_with_comparison (arg, invert, code2,
2362 else if (!same_bool_result_p (result, temp))
2378 /* Try to simplify the OR of two comparisons, specified by
2379 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2380 If this can be simplified to a single expression (without requiring
2381 introducing more SSA variables to hold intermediate values),
2382 return the resulting tree. Otherwise return NULL_TREE.
2383 If the result expression is non-null, it has boolean type. */
2386 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2387 enum tree_code code2, tree op2a, tree op2b)
2389 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2393 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2397 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2399 Either NULL_TREE, a simplified but non-constant or a constant
2402 ??? This should go into a gimple-fold-inline.h file to be eventually
2403 privatized with the single valueize function used in the various TUs
2404 to avoid the indirect function call overhead. */
2407 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2409 location_t loc = gimple_location (stmt);
2410 switch (gimple_code (stmt))
2414 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2416 switch (get_gimple_rhs_class (subcode))
2418 case GIMPLE_SINGLE_RHS:
2420 tree rhs = gimple_assign_rhs1 (stmt);
2421 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2423 if (TREE_CODE (rhs) == SSA_NAME)
2425 /* If the RHS is an SSA_NAME, return its known constant value,
2427 return (*valueize) (rhs);
2429 /* Handle propagating invariant addresses into address
2431 else if (TREE_CODE (rhs) == ADDR_EXPR
2432 && !is_gimple_min_invariant (rhs))
2434 HOST_WIDE_INT offset;
2436 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2440 && (CONSTANT_CLASS_P (base)
2441 || decl_address_invariant_p (base)))
2442 return build_invariant_address (TREE_TYPE (rhs),
2445 else if (TREE_CODE (rhs) == CONSTRUCTOR
2446 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2447 && (CONSTRUCTOR_NELTS (rhs)
2448 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2454 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2456 val = (*valueize) (val);
2457 if (TREE_CODE (val) == INTEGER_CST
2458 || TREE_CODE (val) == REAL_CST
2459 || TREE_CODE (val) == FIXED_CST)
2460 list = tree_cons (NULL_TREE, val, list);
2465 return build_vector (TREE_TYPE (rhs), nreverse (list));
2468 if (kind == tcc_reference)
2470 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2471 || TREE_CODE (rhs) == REALPART_EXPR
2472 || TREE_CODE (rhs) == IMAGPART_EXPR)
2473 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2475 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2476 return fold_unary_loc (EXPR_LOCATION (rhs),
2478 TREE_TYPE (rhs), val);
2480 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2481 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2483 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2484 return fold_ternary_loc (EXPR_LOCATION (rhs),
2486 TREE_TYPE (rhs), val,
2487 TREE_OPERAND (rhs, 1),
2488 TREE_OPERAND (rhs, 2));
2490 else if (TREE_CODE (rhs) == MEM_REF
2491 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2493 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2494 if (TREE_CODE (val) == ADDR_EXPR
2495 && is_gimple_min_invariant (val))
2497 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2499 TREE_OPERAND (rhs, 1));
2504 return fold_const_aggregate_ref_1 (rhs, valueize);
2506 else if (kind == tcc_declaration)
2507 return get_symbol_constant_value (rhs);
2511 case GIMPLE_UNARY_RHS:
2513 /* Handle unary operators that can appear in GIMPLE form.
2514 Note that we know the single operand must be a constant,
2515 so this should almost always return a simplified RHS. */
2516 tree lhs = gimple_assign_lhs (stmt);
2517 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2519 /* Conversions are useless for CCP purposes if they are
2520 value-preserving. Thus the restrictions that
2521 useless_type_conversion_p places for restrict qualification
2522 of pointer types should not apply here.
2523 Substitution later will only substitute to allowed places. */
2524 if (CONVERT_EXPR_CODE_P (subcode)
2525 && POINTER_TYPE_P (TREE_TYPE (lhs))
2526 && POINTER_TYPE_P (TREE_TYPE (op0))
2527 && (TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2528 == TYPE_ADDR_SPACE (TREE_TYPE (op0))))
2532 fold_unary_ignore_overflow_loc (loc, subcode,
2533 gimple_expr_type (stmt), op0);
2536 case GIMPLE_BINARY_RHS:
2538 /* Handle binary operators that can appear in GIMPLE form. */
2539 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2540 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2542 /* Translate &x + CST into an invariant form suitable for
2543 further propagation. */
2544 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2545 && TREE_CODE (op0) == ADDR_EXPR
2546 && TREE_CODE (op1) == INTEGER_CST)
2548 tree off = fold_convert (ptr_type_node, op1);
2549 return build_fold_addr_expr_loc
2551 fold_build2 (MEM_REF,
2552 TREE_TYPE (TREE_TYPE (op0)),
2553 unshare_expr (op0), off));
2556 return fold_binary_loc (loc, subcode,
2557 gimple_expr_type (stmt), op0, op1);
2560 case GIMPLE_TERNARY_RHS:
2562 /* Handle ternary operators that can appear in GIMPLE form. */
2563 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2564 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2565 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2567 /* Fold embedded expressions in ternary codes. */
2568 if ((subcode == COND_EXPR
2569 || subcode == VEC_COND_EXPR)
2570 && COMPARISON_CLASS_P (op0))
2572 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2573 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2574 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2575 TREE_TYPE (op0), op00, op01);
2580 return fold_ternary_loc (loc, subcode,
2581 gimple_expr_type (stmt), op0, op1, op2);
2593 if (gimple_call_internal_p (stmt))
2594 /* No folding yet for these functions. */
2597 fn = (*valueize) (gimple_call_fn (stmt));
2598 if (TREE_CODE (fn) == ADDR_EXPR
2599 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2600 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2602 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2605 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2606 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2607 call = build_call_array_loc (loc,
2608 gimple_call_return_type (stmt),
2609 fn, gimple_call_num_args (stmt), args);
2610 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2612 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2613 STRIP_NOPS (retval);
2624 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2625 Returns NULL_TREE if folding to a constant is not possible, otherwise
2626 returns a constant according to is_gimple_min_invariant. */
2629 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2631 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2632 if (res && is_gimple_min_invariant (res))
2638 /* The following set of functions are supposed to fold references using
2639 their constant initializers. */
2641 static tree fold_ctor_reference (tree type, tree ctor,
2642 unsigned HOST_WIDE_INT offset,
2643 unsigned HOST_WIDE_INT size);
2645 /* See if we can find constructor defining value of BASE.
2646 When we know the consructor with constant offset (such as
2647 base is array[40] and we do know constructor of array), then
2648 BIT_OFFSET is adjusted accordingly.
2650 As a special case, return error_mark_node when constructor
2651 is not explicitly available, but it is known to be zero
2652 such as 'static const int a;'. */
2654 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2655 tree (*valueize)(tree))
2657 HOST_WIDE_INT bit_offset2, size, max_size;
2658 if (TREE_CODE (base) == MEM_REF)
2660 if (!integer_zerop (TREE_OPERAND (base, 1)))
2662 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2664 *bit_offset += (mem_ref_offset (base).low
2669 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2670 base = valueize (TREE_OPERAND (base, 0));
2671 if (!base || TREE_CODE (base) != ADDR_EXPR)
2673 base = TREE_OPERAND (base, 0);
2676 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2677 DECL_INITIAL. If BASE is a nested reference into another
2678 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2679 the inner reference. */
2680 switch (TREE_CODE (base))
2683 if (!const_value_known_p (base))
2688 if (!DECL_INITIAL (base)
2689 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2690 return error_mark_node;
2691 return DECL_INITIAL (base);
2695 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2696 if (max_size == -1 || size != max_size)
2698 *bit_offset += bit_offset2;
2699 return get_base_constructor (base, bit_offset, valueize);
2710 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2711 to the memory at bit OFFSET.
2713 We do only simple job of folding byte accesses. */
2716 fold_string_cst_ctor_reference (tree type, tree ctor,
2717 unsigned HOST_WIDE_INT offset,
2718 unsigned HOST_WIDE_INT size)
2720 if (INTEGRAL_TYPE_P (type)
2721 && (TYPE_MODE (type)
2722 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2723 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2725 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2726 && size == BITS_PER_UNIT
2727 && !(offset % BITS_PER_UNIT))
2729 offset /= BITS_PER_UNIT;
2730 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2731 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2734 const char a[20]="hello";
2737 might lead to offset greater than string length. In this case we
2738 know value is either initialized to 0 or out of bounds. Return 0
2740 return build_zero_cst (type);
2745 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2746 SIZE to the memory at bit OFFSET. */
2749 fold_array_ctor_reference (tree type, tree ctor,
2750 unsigned HOST_WIDE_INT offset,
2751 unsigned HOST_WIDE_INT size)
2753 unsigned HOST_WIDE_INT cnt;
2755 double_int low_bound, elt_size;
2756 double_int index, max_index;
2757 double_int access_index;
2758 tree domain_type = NULL_TREE;
2759 HOST_WIDE_INT inner_offset;
2761 /* Compute low bound and elt size. */
2762 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2763 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2764 if (domain_type && TYPE_MIN_VALUE (domain_type))
2766 /* Static constructors for variably sized objects makes no sense. */
2767 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2768 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2771 low_bound = double_int_zero;
2772 /* Static constructors for variably sized objects makes no sense. */
2773 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2776 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2779 /* We can handle only constantly sized accesses that are known to not
2780 be larger than size of array element. */
2781 if (!TYPE_SIZE_UNIT (type)
2782 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2783 || double_int_cmp (elt_size,
2784 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
2787 /* Compute the array index we look for. */
2788 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
2789 elt_size, TRUNC_DIV_EXPR);
2790 access_index = double_int_add (access_index, low_bound);
2792 /* And offset within the access. */
2793 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
2795 /* See if the array field is large enough to span whole access. We do not
2796 care to fold accesses spanning multiple array indexes. */
2797 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
2800 index = double_int_sub (low_bound, double_int_one);
2801 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2803 /* Array constructor might explicitely set index, or specify range
2804 or leave index NULL meaning that it is next index after previous
2808 if (TREE_CODE (cfield) == INTEGER_CST)
2809 max_index = index = tree_to_double_int (cfield);
2812 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2813 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2814 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2818 max_index = index = double_int_add (index, double_int_one);
2820 /* Do we have match? */
2821 if (double_int_cmp (access_index, index, 1) >= 0
2822 && double_int_cmp (access_index, max_index, 1) <= 0)
2823 return fold_ctor_reference (type, cval, inner_offset, size);
2825 /* When memory is not explicitely mentioned in constructor,
2826 it is 0 (or out of range). */
2827 return build_zero_cst (type);
2830 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2831 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2834 fold_nonarray_ctor_reference (tree type, tree ctor,
2835 unsigned HOST_WIDE_INT offset,
2836 unsigned HOST_WIDE_INT size)
2838 unsigned HOST_WIDE_INT cnt;
2841 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2844 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2845 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2846 tree field_size = DECL_SIZE (cfield);
2847 double_int bitoffset;
2848 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2849 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
2850 double_int bitoffset_end, access_end;
2852 /* Variable sized objects in static constructors makes no sense,
2853 but field_size can be NULL for flexible array members. */
2854 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2855 && TREE_CODE (byte_offset) == INTEGER_CST
2856 && (field_size != NULL_TREE
2857 ? TREE_CODE (field_size) == INTEGER_CST
2858 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2860 /* Compute bit offset of the field. */
2861 bitoffset = double_int_add (tree_to_double_int (field_offset),
2862 double_int_mul (byte_offset_cst,
2863 bits_per_unit_cst));
2864 /* Compute bit offset where the field ends. */
2865 if (field_size != NULL_TREE)
2866 bitoffset_end = double_int_add (bitoffset,
2867 tree_to_double_int (field_size));
2869 bitoffset_end = double_int_zero;
2871 access_end = double_int_add (uhwi_to_double_int (offset),
2872 uhwi_to_double_int (size));
2874 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2875 [BITOFFSET, BITOFFSET_END)? */
2876 if (double_int_cmp (access_end, bitoffset, 0) > 0
2877 && (field_size == NULL_TREE
2878 || double_int_cmp (uhwi_to_double_int (offset),
2879 bitoffset_end, 0) < 0))
2881 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
2883 /* We do have overlap. Now see if field is large enough to
2884 cover the access. Give up for accesses spanning multiple
2886 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
2888 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
2890 return fold_ctor_reference (type, cval,
2891 double_int_to_uhwi (inner_offset), size);
2894 /* When memory is not explicitely mentioned in constructor, it is 0. */
2895 return build_zero_cst (type);
2898 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2899 to the memory at bit OFFSET. */
2902 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2903 unsigned HOST_WIDE_INT size)
2907 /* We found the field with exact match. */
2908 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2910 return canonicalize_constructor_val (ctor);
2912 /* We are at the end of walk, see if we can view convert the
2914 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2915 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2916 && operand_equal_p (TYPE_SIZE (type),
2917 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2919 ret = canonicalize_constructor_val (ctor);
2920 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2925 if (TREE_CODE (ctor) == STRING_CST)
2926 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2927 if (TREE_CODE (ctor) == CONSTRUCTOR)
2930 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2931 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2932 return fold_array_ctor_reference (type, ctor, offset, size);
2934 return fold_nonarray_ctor_reference (type, ctor, offset, size);
2940 /* Return the tree representing the element referenced by T if T is an
2941 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
2942 names using VALUEIZE. Return NULL_TREE otherwise. */
2945 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2947 tree ctor, idx, base;
2948 HOST_WIDE_INT offset, size, max_size;
2951 if (TREE_THIS_VOLATILE (t))
2954 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2955 return get_symbol_constant_value (t);
2957 tem = fold_read_from_constant_string (t);
2961 switch (TREE_CODE (t))
2964 case ARRAY_RANGE_REF:
2965 /* Constant indexes are handled well by get_base_constructor.
2966 Only special case variable offsets.
2967 FIXME: This code can't handle nested references with variable indexes
2968 (they will be handled only by iteration of ccp). Perhaps we can bring
2969 get_ref_base_and_extent here and make it use a valueize callback. */
2970 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
2972 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
2973 && host_integerp (idx, 0))
2975 tree low_bound, unit_size;
2977 /* If the resulting bit-offset is constant, track it. */
2978 if ((low_bound = array_ref_low_bound (t),
2979 host_integerp (low_bound, 0))
2980 && (unit_size = array_ref_element_size (t),
2981 host_integerp (unit_size, 1)))
2983 offset = TREE_INT_CST_LOW (idx);
2984 offset -= TREE_INT_CST_LOW (low_bound);
2985 offset *= TREE_INT_CST_LOW (unit_size);
2986 offset *= BITS_PER_UNIT;
2988 base = TREE_OPERAND (t, 0);
2989 ctor = get_base_constructor (base, &offset, valueize);
2990 /* Empty constructor. Always fold to 0. */
2991 if (ctor == error_mark_node)
2992 return build_zero_cst (TREE_TYPE (t));
2993 /* Out of bound array access. Value is undefined,
2997 /* We can not determine ctor. */
3000 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3001 TREE_INT_CST_LOW (unit_size)
3009 case TARGET_MEM_REF:
3011 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3012 ctor = get_base_constructor (base, &offset, valueize);
3014 /* Empty constructor. Always fold to 0. */
3015 if (ctor == error_mark_node)
3016 return build_zero_cst (TREE_TYPE (t));
3017 /* We do not know precise address. */
3018 if (max_size == -1 || max_size != size)
3020 /* We can not determine ctor. */
3024 /* Out of bound array access. Value is undefined, but don't fold. */
3028 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
3033 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3034 if (c && TREE_CODE (c) == COMPLEX_CST)
3035 return fold_build1_loc (EXPR_LOCATION (t),
3036 TREE_CODE (t), TREE_TYPE (t), c);
3048 fold_const_aggregate_ref (tree t)
3050 return fold_const_aggregate_ref_1 (t, NULL);
3053 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3054 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3055 KNOWN_BINFO carries the binfo describing the true type of
3056 OBJ_TYPE_REF_OBJECT(REF). */
3059 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3061 unsigned HOST_WIDE_INT offset, size;
3064 v = BINFO_VTABLE (known_binfo);
3065 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
3069 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3071 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3072 v = TREE_OPERAND (v, 0);
3077 if (TREE_CODE (v) != ADDR_EXPR)
3079 v = TREE_OPERAND (v, 0);
3081 if (TREE_CODE (v) != VAR_DECL
3082 || !DECL_VIRTUAL_P (v)
3083 || !DECL_INITIAL (v)
3084 || DECL_INITIAL (v) == error_mark_node)
3086 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3087 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
3088 offset += token * size;
3089 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
3093 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3094 || TREE_CODE (fn) == FDESC_EXPR);
3095 fn = TREE_OPERAND (fn, 0);
3096 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3098 /* When cgraph node is missing and function is not public, we cannot
3099 devirtualize. This can happen in WHOPR when the actual method
3100 ends up in other partition, because we found devirtualization
3101 possibility too late. */
3102 if (!can_refer_decl_in_current_unit_p (fn))
3108 /* Return true iff VAL is a gimple expression that is known to be
3109 non-negative. Restricted to floating-point inputs. */
3112 gimple_val_nonnegative_real_p (tree val)
3116 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3118 /* Use existing logic for non-gimple trees. */
3119 if (tree_expr_nonnegative_p (val))
3122 if (TREE_CODE (val) != SSA_NAME)
3125 /* Currently we look only at the immediately defining statement
3126 to make this determination, since recursion on defining
3127 statements of operands can lead to quadratic behavior in the
3128 worst case. This is expected to catch almost all occurrences
3129 in practice. It would be possible to implement limited-depth
3130 recursion if important cases are lost. Alternatively, passes
3131 that need this information (such as the pow/powi lowering code
3132 in the cse_sincos pass) could be revised to provide it through
3133 dataflow propagation. */
3135 def_stmt = SSA_NAME_DEF_STMT (val);
3137 if (is_gimple_assign (def_stmt))
3141 /* See fold-const.c:tree_expr_nonnegative_p for additional
3142 cases that could be handled with recursion. */
3144 switch (gimple_assign_rhs_code (def_stmt))
3147 /* Always true for floating-point operands. */
3151 /* True if the two operands are identical (since we are
3152 restricted to floating-point inputs). */
3153 op0 = gimple_assign_rhs1 (def_stmt);
3154 op1 = gimple_assign_rhs2 (def_stmt);
3157 || operand_equal_p (op0, op1, 0))
3164 else if (is_gimple_call (def_stmt))
3166 tree fndecl = gimple_call_fndecl (def_stmt);
3168 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3172 switch (DECL_FUNCTION_CODE (fndecl))
3174 CASE_FLT_FN (BUILT_IN_ACOS):
3175 CASE_FLT_FN (BUILT_IN_ACOSH):
3176 CASE_FLT_FN (BUILT_IN_CABS):
3177 CASE_FLT_FN (BUILT_IN_COSH):
3178 CASE_FLT_FN (BUILT_IN_ERFC):
3179 CASE_FLT_FN (BUILT_IN_EXP):
3180 CASE_FLT_FN (BUILT_IN_EXP10):
3181 CASE_FLT_FN (BUILT_IN_EXP2):
3182 CASE_FLT_FN (BUILT_IN_FABS):
3183 CASE_FLT_FN (BUILT_IN_FDIM):
3184 CASE_FLT_FN (BUILT_IN_HYPOT):
3185 CASE_FLT_FN (BUILT_IN_POW10):
3188 CASE_FLT_FN (BUILT_IN_SQRT):
3189 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3190 nonnegative inputs. */
3191 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3196 CASE_FLT_FN (BUILT_IN_POWI):
3197 /* True if the second argument is an even integer. */
3198 arg1 = gimple_call_arg (def_stmt, 1);
3200 if (TREE_CODE (arg1) == INTEGER_CST
3201 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3206 CASE_FLT_FN (BUILT_IN_POW):
3207 /* True if the second argument is an even integer-valued
3209 arg1 = gimple_call_arg (def_stmt, 1);
3211 if (TREE_CODE (arg1) == REAL_CST)
3216 c = TREE_REAL_CST (arg1);
3217 n = real_to_integer (&c);
3221 REAL_VALUE_TYPE cint;
3222 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3223 if (real_identical (&c, &cint))