1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
33 #include "gimple-fold.h"
35 /* Return true when DECL can be referenced from current unit.
36 We can get declarations that are not possible to reference for
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to other compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
45 2) In WHOPR mode devirtualization might lead to reference
46 to method that was partitioned elsewhere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
50 3) COMDAT functions referred by external vtables that
51 we devirtualize only during final compilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
/* Return true iff DECL may safely be referenced from the unit being
   compiled; see the rationale comment preceding this function.
   NOTE(review): this listing elides some original lines (braces and
   return statements), so the comments below annotate only the visible
   code.  */
57 can_refer_decl_in_current_unit_p (tree decl)
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
/* Decls that are neither static nor external are local to this unit.  */
62 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
64 /* External flag is set, so we deal with C++ reference
65 to static object from other file. */
66 if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
67 && TREE_CODE (decl) == VAR_DECL)
69 /* Just be sure it is not big in frontend setting
70 flags incorrectly. Those variables should never
72 gcc_checking_assert (!(vnode = varpool_get_node (decl))
73 || !vnode->finalized);
76 /* When function is public, we always can introduce new reference.
77 Exception are the COMDAT functions where introducing a direct
78 reference imply need to include function body in the current unit. */
79 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
81 /* We are not at ltrans stage; so don't worry about WHOPR.
82 Also when still gimplifying all referred comdat functions will be
84 ??? as observed in PR20991 for already optimized out comdat virtual functions
85 we may not necessarily give up because the copy will be output elsewhere when
86 corresponding vtable is output. */
87 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
89 /* If we already output the function body, we are safe. */
90 if (TREE_ASM_WRITTEN (decl))
92 if (TREE_CODE (decl) == FUNCTION_DECL)
94 node = cgraph_get_node (decl);
95 /* Check that we still have function body and that we didn't take
96 the decision to eliminate offline copy of the function yet.
97 The second is important when devirtualization happens during final
98 compilation stage when making a new reference no longer makes callee
100 if (!node || !node->analyzed || node->global.inlined_to)
103 else if (TREE_CODE (decl) == VAR_DECL)
/* For variables, a usable (finalized) varpool node must exist.  */
105 vnode = varpool_get_node (decl);
106 if (!vnode || !vnode->finalized)
112 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
113 acceptable form for is_gimple_min_invariant. */
116 canonicalize_constructor_val (tree cval)
/* NOTE(review): some original lines are elided in this listing.  */
/* Rewrite PTR p+ CST into &MEM_REF[ptr, cst] so the result becomes an
   ADDR_EXPR that is_gimple_min_invariant can accept.  */
119 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
120 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
122 tree ptr = TREE_OPERAND (cval, 0);
123 if (is_gimple_min_invariant (ptr))
124 cval = build1_loc (EXPR_LOCATION (cval),
125 ADDR_EXPR, TREE_TYPE (ptr),
126 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
128 fold_convert (ptr_type_node,
129 TREE_OPERAND (cval, 1))));
131 if (TREE_CODE (cval) == ADDR_EXPR)
133 tree base = get_base_address (TREE_OPERAND (cval, 0));
/* Reject addresses of decls that cannot be referenced from this unit.  */
136 && (TREE_CODE (base) == VAR_DECL
137 || TREE_CODE (base) == FUNCTION_DECL)
138 && !can_refer_decl_in_current_unit_p (base))
140 if (cfun && base && TREE_CODE (base) == VAR_DECL)
141 add_referenced_var (base);
142 /* Fixup types in global initializers. */
143 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
144 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
149 /* If SYM is a constant variable with known value, return the value.
150 NULL_TREE is returned otherwise. */
153 get_symbol_constant_value (tree sym)
/* NOTE(review): some original lines are elided in this listing.  */
155 if (const_value_known_p (sym))
157 tree val = DECL_INITIAL (sym);
/* Canonicalize the initializer before checking it is a valid
   GIMPLE invariant.  */
160 val = canonicalize_constructor_val (val);
161 if (val && is_gimple_min_invariant (val))
166 /* Variables declared 'const' without an initializer
167 have zero as the initializer if they may not be
168 overridden at link or run time. */
170 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
171 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
172 return build_zero_cst (TREE_TYPE (sym));
180 /* Subroutine of fold_stmt. We perform several simplifications of the
181 memory reference tree EXPR and make sure to re-gimplify them properly
182 after propagation of constant addresses. IS_LHS is true if the
183 reference is supposed to be an lvalue. */
186 maybe_fold_reference (tree expr, bool is_lhs)
/* NOTE(review): some original lines are elided in this listing.  */
/* Fold component extractions (view-convert, real/imag part,
   bit-field-ref) of constants directly.  */
191 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
192 || TREE_CODE (expr) == REALPART_EXPR
193 || TREE_CODE (expr) == IMAGPART_EXPR)
194 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
195 return fold_unary_loc (EXPR_LOCATION (expr),
198 TREE_OPERAND (expr, 0));
199 else if (TREE_CODE (expr) == BIT_FIELD_REF
200 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
201 return fold_ternary_loc (EXPR_LOCATION (expr),
204 TREE_OPERAND (expr, 0),
205 TREE_OPERAND (expr, 1),
206 TREE_OPERAND (expr, 2));
/* Walk down to the base of the handled-component chain.  */
208 while (handled_component_p (*t))
209 t = &TREE_OPERAND (*t, 0);
211 /* Canonicalize MEM_REFs invariant address operand. Do this first
212 to avoid feeding non-canonical MEM_REFs elsewhere. */
213 if (TREE_CODE (*t) == MEM_REF
214 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
/* Preserve volatility across the re-fold of the MEM_REF.  */
216 bool volatile_p = TREE_THIS_VOLATILE (*t);
217 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
218 TREE_OPERAND (*t, 0),
219 TREE_OPERAND (*t, 1));
222 TREE_THIS_VOLATILE (tem) = volatile_p;
/* Recurse to pick up simplifications enabled by the canonicalization.  */
224 tem = maybe_fold_reference (expr, is_lhs);
232 && (result = fold_const_aggregate_ref (expr))
233 && is_gimple_min_invariant (result))
236 /* Fold back MEM_REFs to reference trees. */
237 if (TREE_CODE (*t) == MEM_REF
238 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
239 && integer_zerop (TREE_OPERAND (*t, 1))
240 && (TREE_THIS_VOLATILE (*t)
241 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
242 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
243 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
244 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
245 /* We have to look out here to not drop a required conversion
246 from the rhs to the lhs if is_lhs, but we don't have the
247 rhs here to verify that. Thus require strict type
249 && types_compatible_p (TREE_TYPE (*t),
250 TREE_TYPE (TREE_OPERAND
251 (TREE_OPERAND (*t, 0), 0))))
254 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
255 tem = maybe_fold_reference (expr, is_lhs);
260 else if (TREE_CODE (*t) == TARGET_MEM_REF)
262 tree tem = maybe_fold_tmr (*t);
266 tem = maybe_fold_reference (expr, is_lhs);
277 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
278 replacement rhs for the statement or NULL_TREE if no simplification
279 could be made. It is assumed that the operands have been previously
283 fold_gimple_assign (gimple_stmt_iterator *si)
/* NOTE(review): some original lines are elided in this listing.  */
285 gimple stmt = gsi_stmt (*si);
286 enum tree_code subcode = gimple_assign_rhs_code (stmt);
287 location_t loc = gimple_location (stmt);
289 tree result = NULL_TREE;
/* Dispatch on the shape of the RHS (single operand, unary, binary,
   ternary).  */
291 switch (get_gimple_rhs_class (subcode))
293 case GIMPLE_SINGLE_RHS:
295 tree rhs = gimple_assign_rhs1 (stmt);
297 if (REFERENCE_CLASS_P (rhs))
298 return maybe_fold_reference (rhs, false);
300 else if (TREE_CODE (rhs) == ADDR_EXPR)
/* Fold the address of a folded reference; &MEM[p, 0] becomes p.  */
302 tree ref = TREE_OPERAND (rhs, 0);
303 tree tem = maybe_fold_reference (ref, true);
305 && TREE_CODE (tem) == MEM_REF
306 && integer_zerop (TREE_OPERAND (tem, 1)))
307 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
309 result = fold_convert (TREE_TYPE (rhs),
310 build_fold_addr_expr_loc (loc, tem));
311 else if (TREE_CODE (ref) == MEM_REF
312 && integer_zerop (TREE_OPERAND (ref, 1)))
313 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
316 else if (TREE_CODE (rhs) == CONSTRUCTOR
317 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
318 && (CONSTRUCTOR_NELTS (rhs)
319 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
321 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
325 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
326 if (TREE_CODE (val) != INTEGER_CST
327 && TREE_CODE (val) != REAL_CST
328 && TREE_CODE (val) != FIXED_CST)
331 return build_vector_from_ctor (TREE_TYPE (rhs),
332 CONSTRUCTOR_ELTS (rhs));
335 else if (DECL_P (rhs))
336 return unshare_expr (get_symbol_constant_value (rhs));
338 /* If we couldn't fold the RHS, hand over to the generic
340 if (result == NULL_TREE)
343 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
344 that may have been added by fold, and "useless" type
345 conversions that might now be apparent due to propagation. */
346 STRIP_USELESS_TYPE_CONVERSION (result);
348 if (result != rhs && valid_gimple_rhs_p (result))
355 case GIMPLE_UNARY_RHS:
357 tree rhs = gimple_assign_rhs1 (stmt);
359 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
362 /* If the operation was a conversion do _not_ mark a
363 resulting constant with TREE_OVERFLOW if the original
364 constant was not. These conversions have implementation
365 defined behavior and retaining the TREE_OVERFLOW flag
366 here would confuse later passes such as VRP. */
367 if (CONVERT_EXPR_CODE_P (subcode)
368 && TREE_CODE (result) == INTEGER_CST
369 && TREE_CODE (rhs) == INTEGER_CST)
370 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
372 STRIP_USELESS_TYPE_CONVERSION (result);
373 if (valid_gimple_rhs_p (result))
379 case GIMPLE_BINARY_RHS:
380 /* Try to canonicalize for boolean-typed X the comparisons
381 X == 0, X == 1, X != 0, and X != 1. */
382 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
383 || gimple_assign_rhs_code (stmt) == NE_EXPR)
385 tree lhs = gimple_assign_lhs (stmt);
386 tree op1 = gimple_assign_rhs1 (stmt);
387 tree op2 = gimple_assign_rhs2 (stmt);
388 tree type = TREE_TYPE (op1);
390 /* Check whether the comparison operands are of the same boolean
391 type as the result type is.
392 Check that second operand is an integer-constant with value
394 if (TREE_CODE (op2) == INTEGER_CST
395 && (integer_zerop (op2) || integer_onep (op2))
396 && useless_type_conversion_p (TREE_TYPE (lhs), type))
398 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
399 bool is_logical_not = false;
401 /* X == 0 and X != 1 is a logical-not of X
402 X == 1 and X != 0 is X */
403 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
404 || (cmp_code == NE_EXPR && integer_onep (op2)))
405 is_logical_not = true;
407 if (is_logical_not == false)
409 /* Only for one-bit precision typed X the transformation
410 !X -> ~X is valid. */
411 else if (TYPE_PRECISION (type) == 1)
412 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
414 /* Otherwise we use !X -> X ^ 1. */
416 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
417 type, op1, build_int_cst (type, 1));
423 result = fold_binary_loc (loc, subcode,
424 TREE_TYPE (gimple_assign_lhs (stmt)),
425 gimple_assign_rhs1 (stmt),
426 gimple_assign_rhs2 (stmt));
430 STRIP_USELESS_TYPE_CONVERSION (result);
431 if (valid_gimple_rhs_p (result))
436 case GIMPLE_TERNARY_RHS:
437 /* Try to fold a conditional expression. */
438 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
440 tree op0 = gimple_assign_rhs1 (stmt);
443 location_t cond_loc = gimple_location (stmt);
445 if (COMPARISON_CLASS_P (op0))
/* Defer overflow warnings while speculatively folding the
   condition; only emit them if the fold is actually used.  */
447 fold_defer_overflow_warnings ();
448 tem = fold_binary_loc (cond_loc,
449 TREE_CODE (op0), TREE_TYPE (op0),
450 TREE_OPERAND (op0, 0),
451 TREE_OPERAND (op0, 1));
452 /* This is actually a conditional expression, not a GIMPLE
453 conditional statement, however, the valid_gimple_rhs_p
454 test still applies. */
455 set = (tem && is_gimple_condexpr (tem)
456 && valid_gimple_rhs_p (tem));
457 fold_undefer_overflow_warnings (set, stmt, 0);
459 else if (is_gimple_min_invariant (op0))
468 result = fold_build3_loc (cond_loc, COND_EXPR,
469 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
470 gimple_assign_rhs2 (stmt),
471 gimple_assign_rhs3 (stmt));
475 result = fold_ternary_loc (loc, subcode,
476 TREE_TYPE (gimple_assign_lhs (stmt)),
477 gimple_assign_rhs1 (stmt),
478 gimple_assign_rhs2 (stmt),
479 gimple_assign_rhs3 (stmt));
483 STRIP_USELESS_TYPE_CONVERSION (result);
484 if (valid_gimple_rhs_p (result))
489 case GIMPLE_INVALID_RHS:
496 /* Attempt to fold a conditional statement. Return true if any changes were
497 made. We only attempt to fold the condition expression, and do not perform
498 any transformation that would require alteration of the cfg. It is
499 assumed that the operands have been previously folded. */
502 fold_gimple_cond (gimple stmt)
/* NOTE(review): some original lines are elided in this listing.  */
504 tree result = fold_binary_loc (gimple_location (stmt),
505 gimple_cond_code (stmt),
507 gimple_cond_lhs (stmt),
508 gimple_cond_rhs (stmt));
512 STRIP_USELESS_TYPE_CONVERSION (result);
/* Only install the folded condition if it is still a valid GIMPLE
   condition expression.  */
513 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
515 gimple_cond_set_condition_from_tree (stmt, result);
523 /* Convert EXPR into a GIMPLE value suitable for substitution on the
524 RHS of an assignment. Insert the necessary statements before
525 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
526 is replaced. If the call is expected to produce a result, then it
527 is replaced by an assignment of the new RHS to the result variable.
528 If the result is to be ignored, then the call is replaced by a
529 GIMPLE_NOP. A proper VDEF chain is retained by making the first
530 VUSE and the last VDEF of the whole sequence be the same as the replaced
531 statement and using new SSA names for stores in between. */
534 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
/* NOTE(review): some original lines are elided in this listing.  */
537 gimple stmt, new_stmt;
538 gimple_stmt_iterator i;
539 gimple_seq stmts = gimple_seq_alloc();
540 struct gimplify_ctx gctx;
545 stmt = gsi_stmt (*si_p);
547 gcc_assert (is_gimple_call (stmt));
549 push_gimplify_context (&gctx);
550 gctx.into_ssa = gimple_in_ssa_p (cfun);
552 lhs = gimple_call_lhs (stmt);
553 if (lhs == NULL_TREE)
555 gimplify_and_add (expr, &stmts);
556 /* We can end up with folding a memcpy of an empty class assignment
557 which gets optimized away by C++ gimplification. */
558 if (gimple_seq_empty_p (stmts))
560 pop_gimplify_context (NULL);
561 if (gimple_in_ssa_p (cfun))
563 unlink_stmt_vdef (stmt);
566 gsi_remove (si_p, true);
/* The call's result is used: gimplify EXPR into a temporary and
   assign it to the original LHS.  */
572 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
573 new_stmt = gimple_build_assign (lhs, tmp);
574 i = gsi_last (stmts);
575 gsi_insert_after_without_update (&i, new_stmt,
576 GSI_CONTINUE_LINKING);
579 pop_gimplify_context (NULL);
581 if (gimple_has_location (stmt))
582 annotate_all_with_location (stmts, gimple_location (stmt));
584 /* First iterate over the replacement statements backward, assigning
585 virtual operands to their defining statements. */
587 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
589 new_stmt = gsi_stmt (i);
590 if (gimple_assign_single_p (new_stmt)
591 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
/* The last store reuses the original statement's VDEF; earlier
   stores get fresh SSA names for the virtual operand.  */
595 vdef = gimple_vdef (stmt);
597 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
598 gimple_set_vdef (new_stmt, vdef);
599 if (TREE_CODE (vdef) == SSA_NAME)
600 SSA_NAME_DEF_STMT (vdef) = new_stmt;
601 laststore = new_stmt;
605 /* Second iterate over the statements forward, assigning virtual
606 operands to their uses. */
608 reaching_vuse = gimple_vuse (stmt);
609 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
611 /* Do not insert the last stmt in this loop but remember it
612 for replacing the original statement. */
615 gsi_insert_before (si_p, last, GSI_NEW_STMT);
618 new_stmt = gsi_stmt (i);
619 /* The replacement can expose previously unreferenced variables. */
620 if (gimple_in_ssa_p (cfun))
621 find_new_referenced_vars (new_stmt);
622 /* If the new statement possibly has a VUSE, update it with exact SSA
623 name we know will reach this one. */
624 if (gimple_has_mem_ops (new_stmt))
625 gimple_set_vuse (new_stmt, reaching_vuse);
626 gimple_set_modified (new_stmt, true);
627 if (gimple_vdef (new_stmt))
628 reaching_vuse = gimple_vdef (new_stmt);
632 /* If the new sequence does not do a store release the virtual
633 definition of the original statement. */
635 && reaching_vuse == gimple_vuse (stmt))
637 tree vdef = gimple_vdef (stmt);
639 && TREE_CODE (vdef) == SSA_NAME)
641 unlink_stmt_vdef (stmt);
642 release_ssa_name (vdef);
646 /* Finally replace the original statement with the last. */
647 gsi_replace (si_p, last, false);
650 /* Return the string length, maximum string length or maximum value of
652 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
653 is not NULL and, for TYPE == 0, its value is not equal to the length
654 we determine or if we are unable to determine the length or value,
655 return false. VISITED is a bitmap of visited variables.
656 TYPE is 0 if string length should be returned, 1 for maximum string
657 length and 2 for maximum value ARG can have. */
660 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
/* NOTE(review): some original lines are elided in this listing.  */
665 if (TREE_CODE (arg) != SSA_NAME)
/* A COND_EXPR succeeds only if both arms yield a consistent length.  */
667 if (TREE_CODE (arg) == COND_EXPR)
668 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
669 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
670 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
671 else if (TREE_CODE (arg) == ADDR_EXPR
672 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
673 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
675 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
676 if (TREE_CODE (aop0) == INDIRECT_REF
677 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
678 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
679 length, visited, type);
/* Reject lengths that are not non-negative integer constants.  */
685 if (TREE_CODE (val) != INTEGER_CST
686 || tree_int_cst_sgn (val) < 0)
690 val = c_strlen (arg, 1);
698 if (TREE_CODE (*length) != INTEGER_CST
699 || TREE_CODE (val) != INTEGER_CST)
702 if (tree_int_cst_lt (*length, val))
706 else if (simple_cst_equal (val, *length) != 1)
714 /* If we were already here, break the infinite cycle. */
715 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
719 def_stmt = SSA_NAME_DEF_STMT (var);
721 switch (gimple_code (def_stmt))
724 /* The RHS of the statement defining VAR must either have a
725 constant length or come from another SSA_NAME with a constant
727 if (gimple_assign_single_p (def_stmt)
728 || gimple_assign_unary_nop_p (def_stmt))
730 tree rhs = gimple_assign_rhs1 (def_stmt);
731 return get_maxval_strlen (rhs, length, visited, type);
737 /* All the arguments of the PHI node must have the same constant
741 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
743 tree arg = gimple_phi_arg (def_stmt, i)->def;
745 /* If this PHI has itself as an argument, we cannot
746 determine the string length of this argument. However,
747 if we can find a constant string length for the other
748 PHI args then we can still be sure that this is a
749 constant string length. So be optimistic and just
750 continue with the next argument. */
751 if (arg == gimple_phi_result (def_stmt))
754 if (!get_maxval_strlen (arg, length, visited, type))
766 /* Fold builtin call in statement STMT. Returns a simplified tree.
767 We may return a non-constant expression, including another call
768 to a different function and with different arguments, e.g.,
769 substituting memcpy for strcpy when the string length is known.
770 Note that some builtins expand into inline code that may not
771 be valid in GIMPLE. Callers must take care. */
774 gimple_fold_builtin (gimple stmt)
/* NOTE(review): some original lines are elided in this listing.  */
782 location_t loc = gimple_location (stmt);
784 gcc_assert (is_gimple_call (stmt));
/* The call's result is ignored when it has no LHS.  */
786 ignore = (gimple_call_lhs (stmt) == NULL);
788 /* First try the generic builtin folder. If that succeeds, return the
790 result = fold_call_stmt (stmt, ignore);
798 /* Ignore MD builtins. */
799 callee = gimple_call_fndecl (stmt);
800 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
803 /* Give up for always_inline inline builtins until they are
805 if (avoid_folding_inline_builtin (callee))
808 /* If the builtin could not be folded, and it has no argument list,
810 nargs = gimple_call_num_args (stmt);
814 /* Limit the work only for builtins we know how to simplify. */
815 switch (DECL_FUNCTION_CODE (callee))
817 case BUILT_IN_STRLEN:
819 case BUILT_IN_FPUTS_UNLOCKED:
823 case BUILT_IN_STRCPY:
824 case BUILT_IN_STRNCPY:
828 case BUILT_IN_MEMCPY_CHK:
829 case BUILT_IN_MEMPCPY_CHK:
830 case BUILT_IN_MEMMOVE_CHK:
831 case BUILT_IN_MEMSET_CHK:
832 case BUILT_IN_STRNCPY_CHK:
836 case BUILT_IN_STRCPY_CHK:
837 case BUILT_IN_STPCPY_CHK:
841 case BUILT_IN_SNPRINTF_CHK:
842 case BUILT_IN_VSNPRINTF_CHK:
/* Bail out if the length argument we need is not actually present.  */
850 if (arg_idx >= nargs)
853 /* Try to use the dataflow information gathered by the CCP process. */
854 visited = BITMAP_ALLOC (NULL);
855 bitmap_clear (visited);
857 memset (val, 0, sizeof (val));
858 a = gimple_call_arg (stmt, arg_idx);
859 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
860 val[arg_idx] = NULL_TREE;
862 BITMAP_FREE (visited);
/* Second dispatch: fold each supported builtin using the discovered
   length/value in VAL[].  */
865 switch (DECL_FUNCTION_CODE (callee))
867 case BUILT_IN_STRLEN:
868 if (val[0] && nargs == 1)
871 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
873 /* If the result is not a valid gimple value, or not a cast
874 of a valid gimple value, then we cannot use the result. */
875 if (is_gimple_val (new_val)
876 || (CONVERT_EXPR_P (new_val)
877 && is_gimple_val (TREE_OPERAND (new_val, 0))))
882 case BUILT_IN_STRCPY:
883 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
884 result = fold_builtin_strcpy (loc, callee,
885 gimple_call_arg (stmt, 0),
886 gimple_call_arg (stmt, 1),
890 case BUILT_IN_STRNCPY:
891 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
892 result = fold_builtin_strncpy (loc, callee,
893 gimple_call_arg (stmt, 0),
894 gimple_call_arg (stmt, 1),
895 gimple_call_arg (stmt, 2),
901 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
902 gimple_call_arg (stmt, 1),
903 ignore, false, val[0]);
906 case BUILT_IN_FPUTS_UNLOCKED:
908 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
909 gimple_call_arg (stmt, 1),
910 ignore, true, val[0]);
913 case BUILT_IN_MEMCPY_CHK:
914 case BUILT_IN_MEMPCPY_CHK:
915 case BUILT_IN_MEMMOVE_CHK:
916 case BUILT_IN_MEMSET_CHK:
917 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
918 result = fold_builtin_memory_chk (loc, callee,
919 gimple_call_arg (stmt, 0),
920 gimple_call_arg (stmt, 1),
921 gimple_call_arg (stmt, 2),
922 gimple_call_arg (stmt, 3),
924 DECL_FUNCTION_CODE (callee));
927 case BUILT_IN_STRCPY_CHK:
928 case BUILT_IN_STPCPY_CHK:
929 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
930 result = fold_builtin_stxcpy_chk (loc, callee,
931 gimple_call_arg (stmt, 0),
932 gimple_call_arg (stmt, 1),
933 gimple_call_arg (stmt, 2),
935 DECL_FUNCTION_CODE (callee));
938 case BUILT_IN_STRNCPY_CHK:
939 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
940 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
941 gimple_call_arg (stmt, 1),
942 gimple_call_arg (stmt, 2),
943 gimple_call_arg (stmt, 3),
947 case BUILT_IN_SNPRINTF_CHK:
948 case BUILT_IN_VSNPRINTF_CHK:
949 if (val[1] && is_gimple_val (val[1]))
950 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
951 DECL_FUNCTION_CODE (callee));
958 if (result && ignore)
959 result = fold_ignored_result (result);
963 /* Generate code adjusting the first parameter of a call statement determined
967 gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
/* NOTE(review): some original lines are elided in this listing.
   Visible behavior: build TMP = PARM0 p+ DELTA before the call and
   make TMP the call's first argument (`this` pointer adjustment).  */
969 gimple call_stmt = gsi_stmt (*gsi);
973 delta = convert_to_ptrofftype (delta);
974 gcc_assert (gimple_call_num_args (call_stmt) >= 1);
975 parm = gimple_call_arg (call_stmt, 0);
976 gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
977 tmp = create_tmp_var (TREE_TYPE (parm), NULL);
978 add_referenced_var (tmp);
980 tmp = make_ssa_name (tmp, NULL);
981 new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
982 SSA_NAME_DEF_STMT (tmp) = new_stmt;
983 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
984 gimple_call_set_arg (call_stmt, 0, tmp);
987 /* Return a binfo to be used for devirtualization of calls based on an object
988 represented by a declaration (i.e. a global or automatically allocated one)
989 or NULL if it cannot be found or is not safe. CST is expected to be an
990 ADDR_EXPR of such object or the function will return NULL. Currently it is
991 safe to use such binfo only if it has no base binfo (i.e. no ancestors). */
994 gimple_extract_devirt_binfo_from_cst (tree cst)
/* NOTE(review): some original lines are elided in this listing.  */
996 HOST_WIDE_INT offset, size, max_size;
997 tree base, type, expected_type, binfo;
998 bool last_artificial = false;
1000 if (!flag_devirtualize
1001 || TREE_CODE (cst) != ADDR_EXPR
1002 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1005 cst = TREE_OPERAND (cst, 0);
1006 expected_type = TREE_TYPE (cst);
1007 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1008 type = TREE_TYPE (base);
1012 || TREE_CODE (type) != RECORD_TYPE)
1015 /* Find the sub-object the constant actually refers to and mark whether it is
1016 an artificial one (as opposed to a user-defined one). */
1019 HOST_WIDE_INT pos, size;
1022 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
/* Walk the fields to find the one containing OFFSET.  */
1027 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1029 if (TREE_CODE (fld) != FIELD_DECL)
1032 pos = int_bit_position (fld);
1033 size = tree_low_cst (DECL_SIZE (fld), 1);
1034 if (pos <= offset && (pos + size) > offset)
1037 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1040 last_artificial = DECL_ARTIFICIAL (fld);
1041 type = TREE_TYPE (fld);
1044 /* Artificial sub-objects are ancestors, we do not want to use them for
1045 devirtualization, at least not here. */
1046 if (last_artificial)
1048 binfo = TYPE_BINFO (type);
1049 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1055 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1056 The statement may be replaced by another statement, e.g., if the call
1057 simplifies to a constant value. Return true if any changes were made.
1058 It is assumed that the operands have been previously folded. */
1061 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
/* NOTE(review): some original lines are elided in this listing.  */
1063 gimple stmt = gsi_stmt (*gsi);
1066 /* Check for builtins that CCP can handle using information not
1067 available in the generic fold routines. */
1068 callee = gimple_call_fndecl (stmt);
1069 if (!inplace && callee && DECL_BUILT_IN (callee))
1071 tree result = gimple_fold_builtin (stmt);
/* Prefer the cheap in-place update; gimplify the replacement
   expression only when that fails.  */
1075 if (!update_call_from_tree (gsi, result))
1076 gimplify_and_update_call_from_tree (gsi, result);
1081 /* Check for virtual calls that became direct calls. */
1082 callee = gimple_call_fn (stmt);
1083 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1085 tree binfo, fndecl, obj;
1086 HOST_WIDE_INT token;
1088 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1090 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
/* Try to devirtualize through the binfo of the constant object.  */
1094 obj = OBJ_TYPE_REF_OBJECT (callee);
1095 binfo = gimple_extract_devirt_binfo_from_cst (obj);
1098 token = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1099 fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1102 gimple_call_set_fndecl (stmt, fndecl);
1109 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1110 distinguishes both cases. */
1113 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
/* NOTE(review): some original lines are elided in this listing.  */
1115 bool changed = false;
1116 gimple stmt = gsi_stmt (*gsi);
/* Remember the following statement so we can tell later whether STMT
   folded away entirely.  */
1118 gimple_stmt_iterator gsinext = *gsi;
1121 gsi_next (&gsinext);
1122 next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
1124 /* Fold the main computation performed by the statement. */
1125 switch (gimple_code (stmt))
1129 unsigned old_num_ops = gimple_num_ops (stmt);
1130 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1131 tree lhs = gimple_assign_lhs (stmt);
1133 /* First canonicalize operand order. This avoids building new
1134 trees if this is the only thing fold would later do. */
1135 if ((commutative_tree_code (subcode)
1136 || commutative_ternary_tree_code (subcode))
1137 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1138 gimple_assign_rhs2 (stmt), false))
1140 tree tem = gimple_assign_rhs1 (stmt);
1141 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1142 gimple_assign_set_rhs2 (stmt, tem);
1145 new_rhs = fold_gimple_assign (gsi);
1147 && !useless_type_conversion_p (TREE_TYPE (lhs),
1148 TREE_TYPE (new_rhs)))
1149 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1152 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1154 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1161 changed |= fold_gimple_cond (stmt);
1165 /* Fold *& in call arguments. */
1166 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1167 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1169 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1172 gimple_call_set_arg (stmt, i, tmp);
1176 changed |= gimple_fold_call (gsi, inplace);
1180 /* Fold *& in asm operands. */
1183 const char **oconstraints;
1184 const char *constraint;
1185 bool allows_mem, allows_reg;
1187 noutputs = gimple_asm_noutputs (stmt);
1188 oconstraints = XALLOCAVEC (const char *, noutputs);
1190 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1192 tree link = gimple_asm_output_op (stmt, i);
1193 tree op = TREE_VALUE (link);
1195 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1196 if (REFERENCE_CLASS_P (op)
1197 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1199 TREE_VALUE (link) = op;
1203 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1205 tree link = gimple_asm_input_op (stmt, i);
1206 tree op = TREE_VALUE (link);
1208 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
/* Whether the *& fold is an lvalue fold depends on the input
   constraint allowing memory but not a register.  */
1209 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1210 oconstraints, &allows_mem, &allows_reg);
1211 if (REFERENCE_CLASS_P (op)
1212 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1215 TREE_VALUE (link) = op;
1223 if (gimple_debug_bind_p (stmt))
1225 tree val = gimple_debug_bind_get_value (stmt);
1227 && REFERENCE_CLASS_P (val))
1229 tree tem = maybe_fold_reference (val, false);
1232 gimple_debug_bind_set_value (stmt, tem);
1242 /* If stmt folds into nothing and it was the last stmt in a bb,
1243 don't call gsi_stmt. */
1244 if (gsi_end_p (*gsi))
1246 gcc_assert (next_stmt == NULL);
1250 stmt = gsi_stmt (*gsi);
1252 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1253 as we'd be changing the next stmt. */
1254 if (gimple_has_lhs (stmt) && stmt != next_stmt)
1256 tree lhs = gimple_get_lhs (stmt);
1257 if (lhs && REFERENCE_CLASS_P (lhs))
1259 tree new_lhs = maybe_fold_reference (lhs, true);
1262 gimple_set_lhs (stmt, new_lhs);
1271 /* Fold the statement pointed to by GSI. In some cases, this function may
1272 replace the whole statement with a new one. Returns true iff folding
1274 The statement pointed to by GSI should be in valid gimple form but may
1275 be in unfolded state as resulting from for example constant propagation
1276 which can produce *&x = 0. */
1279 fold_stmt (gimple_stmt_iterator *gsi)
/* Thin wrapper: full (non-inplace) folding via the shared worker.  */
1281 return fold_stmt_1 (gsi, false);
1284 /* Perform the minimal folding on statement *GSI. Only operations like
1285 *&x created by constant propagation are handled. The statement cannot
1286 be replaced with a new one. Return true if the statement was
1287 changed, false otherwise.
1288 The statement *GSI should be in valid gimple form but may
1289 be in unfolded state as resulting from for example constant propagation
1290 which can produce *&x = 0. */
1293 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1295 gimple stmt = gsi_stmt (*gsi);
1296 bool changed = fold_stmt_1 (gsi, true);
/* Inplace folding must never have replaced the statement object.  */
1297 gcc_assert (gsi_stmt (*gsi) == stmt);
1301 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1302 if EXPR is null or we don't know how.
1303 If non-null, the result always has boolean type. */
1306 canonicalize_bool (tree expr, bool invert)
/* Inverted case: constants are flipped, SSA names become (expr == 0),
   and comparisons are replaced by their logical inverse.
   NOTE(review): the enclosing if (invert) / else structure appears
   elided in this view -- confirm against the full source.  */
1312 if (integer_nonzerop (expr))
1313 return boolean_false_node;
1314 else if (integer_zerop (expr))
1315 return boolean_true_node;
1316 else if (TREE_CODE (expr) == SSA_NAME)
1317 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1318 build_int_cst (TREE_TYPE (expr), 0));
1319 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
/* invert_tree_comparison with honor_nans = false; floating-point
   operands would need the NaN-aware inversion.  */
1320 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1322 TREE_OPERAND (expr, 0),
1323 TREE_OPERAND (expr, 1));
/* Non-inverted case: if EXPR is already boolean-typed it can be used
   (nearly) as-is; otherwise SSA names become (expr != 0).  */
1329 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1331 if (integer_nonzerop (expr))
1332 return boolean_true_node;
1333 else if (integer_zerop (expr))
1334 return boolean_false_node;
1335 else if (TREE_CODE (expr) == SSA_NAME)
1336 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1337 build_int_cst (TREE_TYPE (expr), 0));
1338 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1339 return fold_build2 (TREE_CODE (expr),
1341 TREE_OPERAND (expr, 0),
1342 TREE_OPERAND (expr, 1));
1348 /* Check to see if a boolean expression EXPR is logically equivalent to the
1349 comparison (OP1 CODE OP2). Check for various identities involving
1353 same_bool_comparison_p (const_tree expr, enum tree_code code,
1354 const_tree op1, const_tree op2)
1358 /* The obvious case. */
1359 if (TREE_CODE (expr) == code
1360 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1361 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1364 /* Check for comparing (name, name != 0) and the case where expr
1365 is an SSA_NAME with a definition matching the comparison. */
1366 if (TREE_CODE (expr) == SSA_NAME
1367 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
/* A boolean SSA name equals (name != 0) and (name == nonzero).  */
1369 if (operand_equal_p (expr, op1, 0))
1370 return ((code == NE_EXPR && integer_zerop (op2))
1371 || (code == EQ_EXPR && integer_nonzerop (op2)));
/* Otherwise look through EXPR's defining statement for a matching
   comparison.  */
1372 s = SSA_NAME_DEF_STMT (expr);
1373 if (is_gimple_assign (s)
1374 && gimple_assign_rhs_code (s) == code
1375 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1376 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1380 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1381 of name is a comparison, recurse. */
1382 if (TREE_CODE (op1) == SSA_NAME
1383 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1385 s = SSA_NAME_DEF_STMT (op1);
1386 if (is_gimple_assign (s)
1387 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1389 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) / (name == nonzero) preserves the sense of the inner
   comparison; recurse with the same code.  */
1390 if ((c == NE_EXPR && integer_zerop (op2))
1391 || (c == EQ_EXPR && integer_nonzerop (op2)))
1392 return same_bool_comparison_p (expr, c,
1393 gimple_assign_rhs1 (s),
1394 gimple_assign_rhs2 (s));
/* (name == 0) / (name != nonzero) flips it; recurse with the
   inverted comparison code (honor_nans = false).  */
1395 if ((c == EQ_EXPR && integer_zerop (op2))
1396 || (c == NE_EXPR && integer_nonzerop (op2)))
1397 return same_bool_comparison_p (expr,
1398 invert_tree_comparison (c, false),
1399 gimple_assign_rhs1 (s),
1400 gimple_assign_rhs2 (s));
1406 /* Check to see if two boolean expressions OP1 and OP2 are logically
1410 same_bool_result_p (const_tree op1, const_tree op2)
1412 /* Simple cases first. */
1413 if (operand_equal_p (op1, op2, 0))
1416 /* Check the cases where at least one of the operands is a comparison.
1417 These are a bit smarter than operand_equal_p in that they apply some
1418 identifies on SSA_NAMEs. */
/* Try each direction: OP1 against the comparison OP2, then OP2
   against the comparison OP1.  */
1419 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1420 && same_bool_comparison_p (op1, TREE_CODE (op2),
1421 TREE_OPERAND (op2, 0),
1422 TREE_OPERAND (op2, 1)))
1424 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1425 && same_bool_comparison_p (op2, TREE_CODE (op1),
1426 TREE_OPERAND (op1, 0),
1427 TREE_OPERAND (op1, 1)))
1434 /* Forward declarations for some mutually recursive functions. */
1437 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1438 enum tree_code code2, tree op2a, tree op2b);
1440 and_var_with_comparison (tree var, bool invert,
1441 enum tree_code code2, tree op2a, tree op2b);
1443 and_var_with_comparison_1 (gimple stmt,
1444 enum tree_code code2, tree op2a, tree op2b);
1446 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1447 enum tree_code code2, tree op2a, tree op2b);
1449 or_var_with_comparison (tree var, bool invert,
1450 enum tree_code code2, tree op2a, tree op2b);
1452 or_var_with_comparison_1 (gimple stmt,
1453 enum tree_code code2, tree op2a, tree op2b);
1455 /* Helper function for and_comparisons_1: try to simplify the AND of the
1456 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1457 If INVERT is true, invert the value of the VAR before doing the AND.
1458 Return NULL_EXPR if we can't simplify this to a single expression. */
1461 and_var_with_comparison (tree var, bool invert,
1462 enum tree_code code2, tree op2a, tree op2b)
1465 gimple stmt = SSA_NAME_DEF_STMT (var);
1467 /* We can only deal with variables whose definitions are assignments. */
1468 if (!is_gimple_assign (stmt))
1471 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1472 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1473 Then we only have to consider the simpler non-inverted cases. */
1475 t = or_var_with_comparison_1 (stmt,
1476 invert_tree_comparison (code2, false),
1479 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool undoes the DeMorgan inversion (when INVERT is set)
   and guarantees a boolean-typed result or NULL_TREE.  */
1480 return canonicalize_bool (t, invert);
1483 /* Try to simplify the AND of the ssa variable defined by the assignment
1484 STMT with the comparison specified by (OP2A CODE2 OP2B).
1485 Return NULL_EXPR if we can't simplify this to a single expression. */
1488 and_var_with_comparison_1 (gimple stmt,
1489 enum tree_code code2, tree op2a, tree op2b)
1491 tree var = gimple_assign_lhs (stmt);
1492 tree true_test_var = NULL_TREE;
1493 tree false_test_var = NULL_TREE;
1494 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1496 /* Check for identities like (var AND (var == 0)) => false. */
1497 if (TREE_CODE (op2a) == SSA_NAME
1498 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* true_test_var: the comparison is equivalent to op2a being true.  */
1500 if ((code2 == NE_EXPR && integer_zerop (op2b))
1501 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1503 true_test_var = op2a;
1504 if (var == true_test_var)
/* false_test_var: the comparison is equivalent to op2a being false,
   so (var AND !var) folds to false.  */
1507 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1508 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1510 false_test_var = op2a;
1511 if (var == false_test_var)
1512 return boolean_false_node;
1516 /* If the definition is a comparison, recurse on it. */
1517 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1519 tree t = and_comparisons_1 (innercode,
1520 gimple_assign_rhs1 (stmt),
1521 gimple_assign_rhs2 (stmt),
1529 /* If the definition is an AND or OR expression, we may be able to
1530 simplify by reassociating. */
1531 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1532 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1534 tree inner1 = gimple_assign_rhs1 (stmt);
1535 tree inner2 = gimple_assign_rhs2 (stmt);
1538 tree partial = NULL_TREE;
1539 bool is_and = (innercode == BIT_AND_EXPR);
1541 /* Check for boolean identities that don't require recursive examination
1543 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1544 inner1 AND (inner1 OR inner2) => inner1
1545 !inner1 AND (inner1 AND inner2) => false
1546 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1547 Likewise for similar cases involving inner2. */
1548 if (inner1 == true_test_var)
1549 return (is_and ? var : inner1);
1550 else if (inner2 == true_test_var)
1551 return (is_and ? var : inner2);
1552 else if (inner1 == false_test_var)
1554 ? boolean_false_node
1555 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1556 else if (inner2 == false_test_var)
1558 ? boolean_false_node
1559 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1561 /* Next, redistribute/reassociate the AND across the inner tests.
1562 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1563 if (TREE_CODE (inner1) == SSA_NAME
1564 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1565 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1566 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1567 gimple_assign_rhs1 (s),
1568 gimple_assign_rhs2 (s),
1569 code2, op2a, op2b)))
1571 /* Handle the AND case, where we are reassociating:
1572 (inner1 AND inner2) AND (op2a code2 op2b)
1574 If the partial result t is a constant, we win. Otherwise
1575 continue on to try reassociating with the other inner test. */
1578 if (integer_onep (t))
1580 else if (integer_zerop (t))
1581 return boolean_false_node;
1584 /* Handle the OR case, where we are redistributing:
1585 (inner1 OR inner2) AND (op2a code2 op2b)
1586 => (t OR (inner2 AND (op2a code2 op2b))) */
1587 else if (integer_onep (t))
1588 return boolean_true_node;
1590 /* Save partial result for later. */
1594 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1595 if (TREE_CODE (inner2) == SSA_NAME
1596 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1597 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1598 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1599 gimple_assign_rhs1 (s),
1600 gimple_assign_rhs2 (s),
1601 code2, op2a, op2b)))
1603 /* Handle the AND case, where we are reassociating:
1604 (inner1 AND inner2) AND (op2a code2 op2b)
1605 => (inner1 AND t) */
1608 if (integer_onep (t))
1610 else if (integer_zerop (t))
1611 return boolean_false_node;
1612 /* If both are the same, we can apply the identity
1614 else if (partial && same_bool_result_p (t, partial))
1618 /* Handle the OR case. where we are redistributing:
1619 (inner1 OR inner2) AND (op2a code2 op2b)
1620 => (t OR (inner1 AND (op2a code2 op2b)))
1621 => (t OR partial) */
1624 if (integer_onep (t))
1625 return boolean_true_node;
1628 /* We already got a simplification for the other
1629 operand to the redistributed OR expression. The
1630 interesting case is when at least one is false.
1631 Or, if both are the same, we can apply the identity
1633 if (integer_zerop (partial))
1635 else if (integer_zerop (t))
1637 else if (same_bool_result_p (t, partial))
1646 /* Try to simplify the AND of two comparisons defined by
1647 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1648 If this can be done without constructing an intermediate value,
1649 return the resulting tree; otherwise NULL_TREE is returned.
1650 This function is deliberately asymmetric as it recurses on SSA_DEFs
1651 in the first comparison but not the second. */
1654 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1655 enum tree_code code2, tree op2a, tree op2b)
1657 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1658 if (operand_equal_p (op1a, op2a, 0)
1659 && operand_equal_p (op1b, op2b, 0))
1661 /* Result will be either NULL_TREE, or a combined comparison. */
1662 tree t = combine_comparisons (UNKNOWN_LOCATION,
1663 TRUTH_ANDIF_EXPR, code1, code2,
1664 boolean_type_node, op1a, op1b);
1669 /* Likewise the swapped case of the above. */
1670 if (operand_equal_p (op1a, op2b, 0)
1671 && operand_equal_p (op1b, op2a, 0))
1673 /* Result will be either NULL_TREE, or a combined comparison. */
1674 tree t = combine_comparisons (UNKNOWN_LOCATION,
1675 TRUTH_ANDIF_EXPR, code1,
1676 swap_tree_comparison (code2),
1677 boolean_type_node, op1a, op1b);
1682 /* If both comparisons are of the same value against constants, we might
1683 be able to merge them. */
1684 if (operand_equal_p (op1a, op2a, 0)
1685 && TREE_CODE (op1b) == INTEGER_CST
1686 && TREE_CODE (op2b) == INTEGER_CST)
/* cmp is the three-way comparison of the two constants: the sign of
   op1b - op2b.  All the case tables below key off this.  */
1688 int cmp = tree_int_cst_compare (op1b, op2b);
1690 /* If we have (op1a == op1b), we should either be able to
1691 return that or FALSE, depending on whether the constant op1b
1692 also satisfies the other comparison against op2b. */
1693 if (code1 == EQ_EXPR)
/* val is whether (x == op1b) implies (x code2 op2b) holds.  */
1699 case EQ_EXPR: val = (cmp == 0); break;
1700 case NE_EXPR: val = (cmp != 0); break;
1701 case LT_EXPR: val = (cmp < 0); break;
1702 case GT_EXPR: val = (cmp > 0); break;
1703 case LE_EXPR: val = (cmp <= 0); break;
1704 case GE_EXPR: val = (cmp >= 0); break;
1705 default: done = false;
1710 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1712 return boolean_false_node;
1715 /* Likewise if the second comparison is an == comparison. */
1716 else if (code2 == EQ_EXPR)
1722 case EQ_EXPR: val = (cmp == 0); break;
1723 case NE_EXPR: val = (cmp != 0); break;
1724 case LT_EXPR: val = (cmp > 0); break;
1725 case GT_EXPR: val = (cmp < 0); break;
1726 case LE_EXPR: val = (cmp >= 0); break;
1727 case GE_EXPR: val = (cmp <= 0); break;
1728 default: done = false;
1733 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1735 return boolean_false_node;
1739 /* Same business with inequality tests. */
1740 else if (code1 == NE_EXPR)
1745 case EQ_EXPR: val = (cmp != 0); break;
1746 case NE_EXPR: val = (cmp == 0); break;
1747 case LT_EXPR: val = (cmp >= 0); break;
1748 case GT_EXPR: val = (cmp <= 0); break;
1749 case LE_EXPR: val = (cmp > 0); break;
1750 case GE_EXPR: val = (cmp < 0); break;
1755 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1757 else if (code2 == NE_EXPR)
1762 case EQ_EXPR: val = (cmp == 0); break;
1763 case NE_EXPR: val = (cmp != 0); break;
1764 case LT_EXPR: val = (cmp <= 0); break;
1765 case GT_EXPR: val = (cmp >= 0); break;
1766 case LE_EXPR: val = (cmp < 0); break;
1767 case GE_EXPR: val = (cmp > 0); break;
1772 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1775 /* Chose the more restrictive of two < or <= comparisons. */
1776 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1777 && (code2 == LT_EXPR || code2 == LE_EXPR))
1779 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1780 return fold_build2 (code1, boolean_type_node, op1a, op1b)/* keep tighter bound */;
1782 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1785 /* Likewise chose the more restrictive of two > or >= comparisons. */
1786 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1787 && (code2 == GT_EXPR || code2 == GE_EXPR))
1789 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1790 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1792 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1795 /* Check for singleton ranges. */
/* (x <= c AND x >= c) => (x == c); the guard on cmp appears elided
   here -- presumably cmp == 0.  */
1797 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1798 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1799 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1801 /* Check for disjoint ranges. */
1803 && (code1 == LT_EXPR || code1 == LE_EXPR)
1804 && (code2 == GT_EXPR || code2 == GE_EXPR))
1805 return boolean_false_node;
1807 && (code1 == GT_EXPR || code1 == GE_EXPR)
1808 && (code2 == LT_EXPR || code2 == LE_EXPR))
1809 return boolean_false_node;
1812 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1813 NAME's definition is a truth value. See if there are any simplifications
1814 that can be done against the NAME's definition. */
1815 if (TREE_CODE (op1a) == SSA_NAME
1816 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1817 && (integer_zerop (op1b) || integer_onep (op1b)))
/* invert means the first comparison tests NAME for falsehood.  */
1819 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1820 || (code1 == NE_EXPR && integer_onep (op1b)));
1821 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1822 switch (gimple_code (stmt))
1825 /* Try to simplify by copy-propagating the definition. */
1826 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1829 /* If every argument to the PHI produces the same result when
1830 ANDed with the second comparison, we win.
1831 Do not do this unless the type is bool since we need a bool
1832 result here anyway. */
1833 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1835 tree result = NULL_TREE;
1837 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1839 tree arg = gimple_phi_arg_def (stmt, i);
1841 /* If this PHI has itself as an argument, ignore it.
1842 If all the other args produce the same result,
1844 if (arg == gimple_phi_result (stmt))
1846 else if (TREE_CODE (arg) == INTEGER_CST)
1848 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1851 result = boolean_false_node;
1852 else if (!integer_zerop (result))
1856 result = fold_build2 (code2, boolean_type_node,
1858 else if (!same_bool_comparison_p (result,
1862 else if (TREE_CODE (arg) == SSA_NAME
1863 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1866 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1867 /* In simple cases we can look through PHI nodes,
1868 but we have to be careful with loops.
1870 if (! dom_info_available_p (CDI_DOMINATORS)
1871 || gimple_bb (def_stmt) == gimple_bb (stmt)
1872 || dominated_by_p (CDI_DOMINATORS,
1873 gimple_bb (def_stmt),
1876 temp = and_var_with_comparison (arg, invert, code2,
1882 else if (!same_bool_result_p (result, temp))
1898 /* Try to simplify the AND of two comparisons, specified by
1899 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1900 If this can be simplified to a single expression (without requiring
1901 introducing more SSA variables to hold intermediate values),
1902 return the resulting tree. Otherwise return NULL_TREE.
1903 If the result expression is non-null, it has boolean type. */
1906 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1907 enum tree_code code2, tree op2a, tree op2b)
/* and_comparisons_1 is asymmetric (it only recurses on the first
   comparison's SSA defs), so if the first ordering fails, retry with
   the operands swapped.  */
1909 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1913 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1916 /* Helper function for or_comparisons_1: try to simplify the OR of the
1917 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1918 If INVERT is true, invert the value of VAR before doing the OR.
1919 Return NULL_EXPR if we can't simplify this to a single expression. */
1922 or_var_with_comparison (tree var, bool invert,
1923 enum tree_code code2, tree op2a, tree op2b)
1926 gimple stmt = SSA_NAME_DEF_STMT (var);
1928 /* We can only deal with variables whose definitions are assignments. */
1929 if (!is_gimple_assign (stmt))
1932 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1933 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1934 Then we only have to consider the simpler non-inverted cases. */
1936 t = and_var_with_comparison_1 (stmt,
1937 invert_tree_comparison (code2, false),
1940 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool re-applies the inversion (when INVERT is set) and
   guarantees a boolean-typed result or NULL_TREE.  */
1941 return canonicalize_bool (t, invert);
1944 /* Try to simplify the OR of the ssa variable defined by the assignment
1945 STMT with the comparison specified by (OP2A CODE2 OP2B).
1946 Return NULL_EXPR if we can't simplify this to a single expression. */
1949 or_var_with_comparison_1 (gimple stmt,
1950 enum tree_code code2, tree op2a, tree op2b)
1952 tree var = gimple_assign_lhs (stmt);
1953 tree true_test_var = NULL_TREE;
1954 tree false_test_var = NULL_TREE;
1955 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1957 /* Check for identities like (var OR (var != 0)) => true . */
1958 if (TREE_CODE (op2a) == SSA_NAME
1959 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* true_test_var: the comparison is equivalent to op2a being true.  */
1961 if ((code2 == NE_EXPR && integer_zerop (op2b))
1962 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1964 true_test_var = op2a;
1965 if (var == true_test_var)
/* false_test_var: the comparison is equivalent to op2a being false,
   so (var OR !var) folds to true.  */
1968 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1969 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1971 false_test_var = op2a;
1972 if (var == false_test_var)
1973 return boolean_true_node;
1977 /* If the definition is a comparison, recurse on it. */
1978 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1980 tree t = or_comparisons_1 (innercode,
1981 gimple_assign_rhs1 (stmt),
1982 gimple_assign_rhs2 (stmt),
1990 /* If the definition is an AND or OR expression, we may be able to
1991 simplify by reassociating. */
1992 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1993 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1995 tree inner1 = gimple_assign_rhs1 (stmt);
1996 tree inner2 = gimple_assign_rhs2 (stmt);
1999 tree partial = NULL_TREE;
2000 bool is_or = (innercode == BIT_IOR_EXPR);
2002 /* Check for boolean identities that don't require recursive examination
2004 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2005 inner1 OR (inner1 AND inner2) => inner1
2006 !inner1 OR (inner1 OR inner2) => true
2007 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2009 if (inner1 == true_test_var)
2010 return (is_or ? var : inner1);
2011 else if (inner2 == true_test_var)
2012 return (is_or ? var : inner2);
2013 else if (inner1 == false_test_var)
2016 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2017 else if (inner2 == false_test_var)
2020 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2022 /* Next, redistribute/reassociate the OR across the inner tests.
2023 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2024 if (TREE_CODE (inner1) == SSA_NAME
2025 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2026 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2027 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2028 gimple_assign_rhs1 (s),
2029 gimple_assign_rhs2 (s),
2030 code2, op2a, op2b)))
2032 /* Handle the OR case, where we are reassociating:
2033 (inner1 OR inner2) OR (op2a code2 op2b)
2035 If the partial result t is a constant, we win. Otherwise
2036 continue on to try reassociating with the other inner test. */
2039 if (integer_onep (t))
2040 return boolean_true_node;
2041 else if (integer_zerop (t))
2045 /* Handle the AND case, where we are redistributing:
2046 (inner1 AND inner2) OR (op2a code2 op2b)
2047 => (t AND (inner2 OR (op2a code op2b))) */
2048 else if (integer_zerop (t))
2049 return boolean_false_node;
2051 /* Save partial result for later. */
2055 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2056 if (TREE_CODE (inner2) == SSA_NAME
2057 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2058 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2059 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2060 gimple_assign_rhs1 (s),
2061 gimple_assign_rhs2 (s),
2062 code2, op2a, op2b)))
2064 /* Handle the OR case, where we are reassociating:
2065 (inner1 OR inner2) OR (op2a code2 op2b)
2067 => (t OR partial) */
2070 if (integer_zerop (t))
2072 else if (integer_onep (t))
2073 return boolean_true_node;
2074 /* If both are the same, we can apply the identity
2076 else if (partial && same_bool_result_p (t, partial))
2080 /* Handle the AND case, where we are redistributing:
2081 (inner1 AND inner2) OR (op2a code2 op2b)
2082 => (t AND (inner1 OR (op2a code2 op2b)))
2083 => (t AND partial) */
2086 if (integer_zerop (t))
2087 return boolean_false_node;
2090 /* We already got a simplification for the other
2091 operand to the redistributed AND expression. The
2092 interesting case is when at least one is true.
2093 Or, if both are the same, we can apply the identity
2095 if (integer_onep (partial))
2097 else if (integer_onep (t))
2099 else if (same_bool_result_p (t, partial))
2108 /* Try to simplify the OR of two comparisons defined by
2109 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2110 If this can be done without constructing an intermediate value,
2111 return the resulting tree; otherwise NULL_TREE is returned.
2112 This function is deliberately asymmetric as it recurses on SSA_DEFs
2113 in the first comparison but not the second. */
2116 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2117 enum tree_code code2, tree op2a, tree op2b)
2119 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2120 if (operand_equal_p (op1a, op2a, 0)
2121 && operand_equal_p (op1b, op2b, 0))
2123 /* Result will be either NULL_TREE, or a combined comparison. */
2124 tree t = combine_comparisons (UNKNOWN_LOCATION,
2125 TRUTH_ORIF_EXPR, code1, code2,
2126 boolean_type_node, op1a, op1b);
2131 /* Likewise the swapped case of the above. */
2132 if (operand_equal_p (op1a, op2b, 0)
2133 && operand_equal_p (op1b, op2a, 0))
2135 /* Result will be either NULL_TREE, or a combined comparison. */
2136 tree t = combine_comparisons (UNKNOWN_LOCATION,
2137 TRUTH_ORIF_EXPR, code1,
2138 swap_tree_comparison (code2),
2139 boolean_type_node, op1a, op1b);
2144 /* If both comparisons are of the same value against constants, we might
2145 be able to merge them. */
2146 if (operand_equal_p (op1a, op2a, 0)
2147 && TREE_CODE (op1b) == INTEGER_CST
2148 && TREE_CODE (op2b) == INTEGER_CST)
/* cmp is the three-way comparison of the two constants: the sign of
   op1b - op2b.  All the case tables below key off this.  */
2150 int cmp = tree_int_cst_compare (op1b, op2b);
2152 /* If we have (op1a != op1b), we should either be able to
2153 return that or TRUE, depending on whether the constant op1b
2154 also satisfies the other comparison against op2b. */
2155 if (code1 == NE_EXPR)
/* val is whether (x == op1b) would satisfy (x code2 op2b).  */
2161 case EQ_EXPR: val = (cmp == 0); break;
2162 case NE_EXPR: val = (cmp != 0); break;
2163 case LT_EXPR: val = (cmp < 0); break;
2164 case GT_EXPR: val = (cmp > 0); break;
2165 case LE_EXPR: val = (cmp <= 0); break;
2166 case GE_EXPR: val = (cmp >= 0); break;
2167 default: done = false;
2172 return boolean_true_node;
2174 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2177 /* Likewise if the second comparison is a != comparison. */
2178 else if (code2 == NE_EXPR)
2184 case EQ_EXPR: val = (cmp == 0); break;
2185 case NE_EXPR: val = (cmp != 0); break;
2186 case LT_EXPR: val = (cmp > 0); break;
2187 case GT_EXPR: val = (cmp < 0); break;
2188 case LE_EXPR: val = (cmp >= 0); break;
2189 case GE_EXPR: val = (cmp <= 0); break;
2190 default: done = false;
2195 return boolean_true_node;
2197 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2201 /* See if an equality test is redundant with the other comparison. */
2202 else if (code1 == EQ_EXPR)
2207 case EQ_EXPR: val = (cmp == 0); break;
2208 case NE_EXPR: val = (cmp != 0); break;
2209 case LT_EXPR: val = (cmp < 0); break;
2210 case GT_EXPR: val = (cmp > 0); break;
2211 case LE_EXPR: val = (cmp <= 0); break;
2212 case GE_EXPR: val = (cmp >= 0); break;
2217 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2219 else if (code2 == EQ_EXPR)
2224 case EQ_EXPR: val = (cmp == 0); break;
2225 case NE_EXPR: val = (cmp != 0); break;
2226 case LT_EXPR: val = (cmp > 0); break;
2227 case GT_EXPR: val = (cmp < 0); break;
2228 case LE_EXPR: val = (cmp >= 0); break;
2229 case GE_EXPR: val = (cmp <= 0); break;
2234 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2237 /* Chose the less restrictive of two < or <= comparisons. */
2238 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2239 && (code2 == LT_EXPR || code2 == LE_EXPR))
/* Note: for OR we keep the LOOSER bound -- the opposite choice from
   and_comparisons_1.  */
2241 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2242 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2244 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2247 /* Likewise chose the less restrictive of two > or >= comparisons. */
2248 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2249 && (code2 == GT_EXPR || code2 == GE_EXPR))
2251 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2252 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2254 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2257 /* Check for singleton ranges. */
/* (x < c OR x > c) => (x != c); the guard on cmp appears elided
   here -- presumably cmp == 0.  */
2259 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2260 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2261 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2263 /* Check for less/greater pairs that don't restrict the range at all. */
2265 && (code1 == LT_EXPR || code1 == LE_EXPR)
2266 && (code2 == GT_EXPR || code2 == GE_EXPR))
2267 return boolean_true_node;
2269 && (code1 == GT_EXPR || code1 == GE_EXPR)
2270 && (code2 == LT_EXPR || code2 == LE_EXPR))
2271 return boolean_true_node;
2274 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2275 NAME's definition is a truth value. See if there are any simplifications
2276 that can be done against the NAME's definition. */
2277 if (TREE_CODE (op1a) == SSA_NAME
2278 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2279 && (integer_zerop (op1b) || integer_onep (op1b)))
/* invert means the first comparison tests NAME for falsehood.  */
2281 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2282 || (code1 == NE_EXPR && integer_onep (op1b)));
2283 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2284 switch (gimple_code (stmt))
2287 /* Try to simplify by copy-propagating the definition. */
2288 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2291 /* If every argument to the PHI produces the same result when
2292 ORed with the second comparison, we win.
2293 Do not do this unless the type is bool since we need a bool
2294 result here anyway. */
2295 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2297 tree result = NULL_TREE;
2299 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2301 tree arg = gimple_phi_arg_def (stmt, i);
2303 /* If this PHI has itself as an argument, ignore it.
2304 If all the other args produce the same result,
2306 if (arg == gimple_phi_result (stmt))
2308 else if (TREE_CODE (arg) == INTEGER_CST)
2310 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2313 result = boolean_true_node;
2314 else if (!integer_onep (result))
2318 result = fold_build2 (code2, boolean_type_node,
2320 else if (!same_bool_comparison_p (result,
2324 else if (TREE_CODE (arg) == SSA_NAME
2325 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2328 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2329 /* In simple cases we can look through PHI nodes,
2330 but we have to be careful with loops.
2332 if (! dom_info_available_p (CDI_DOMINATORS)
2333 || gimple_bb (def_stmt) == gimple_bb (stmt)
2334 || dominated_by_p (CDI_DOMINATORS,
2335 gimple_bb (def_stmt),
2338 temp = or_var_with_comparison (arg, invert, code2,
2344 else if (!same_bool_result_p (result, temp))
2360 /* Try to simplify the OR of two comparisons, specified by
2361 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2362 If this can be simplified to a single expression (without requiring
2363 introducing more SSA variables to hold intermediate values),
2364 return the resulting tree. Otherwise return NULL_TREE.
2365 If the result expression is non-null, it has boolean type. */
2368 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2369 enum tree_code code2, tree op2a, tree op2b)
/* or_comparisons_1 is asymmetric (it only recurses on the first
   comparison's SSA defs), so if the first ordering fails, retry with
   the operands swapped.  */
2371 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2375 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2379 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2381 Either NULL_TREE, a simplified but non-constant or a constant
2384 ??? This should go into a gimple-fold-inline.h file to be eventually
2385 privatized with the single valueize function used in the various TUs
2386 to avoid the indirect function call overhead. */
/* Worker: fold STMT to a constant (or simplified tree), valueizing each
   SSA name operand through VALUEIZE before folding.  Dispatches on the
   statement code and, for assignments, on the RHS class.  */
2389 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2391 location_t loc = gimple_location (stmt);
2392 switch (gimple_code (stmt))
2396 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2398 switch (get_gimple_rhs_class (subcode))
2400 case GIMPLE_SINGLE_RHS:
2402 tree rhs = gimple_assign_rhs1 (stmt);
2403 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2405 if (TREE_CODE (rhs) == SSA_NAME)
2407 /* If the RHS is an SSA_NAME, return its known constant value,
2409 return (*valueize) (rhs);
2411 /* Handle propagating invariant addresses into address
2413 else if (TREE_CODE (rhs) == ADDR_EXPR
2414 && !is_gimple_min_invariant (rhs))
2416 HOST_WIDE_INT offset;
2418 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
/* Only constant or address-invariant bases can be turned into an
   invariant address expression.  */
2422 && (CONSTANT_CLASS_P (base)
2423 || decl_address_invariant_p (base)))
2424 return build_invariant_address (TREE_TYPE (rhs),
/* A fully-initialized vector CONSTRUCTOR can become a VECTOR_CST when
   every valueized element is a simple constant.  */
2427 else if (TREE_CODE (rhs) == CONSTRUCTOR
2428 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2429 && (CONSTRUCTOR_NELTS (rhs)
2430 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2436 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2438 val = (*valueize) (val);
2439 if (TREE_CODE (val) == INTEGER_CST
2440 || TREE_CODE (val) == REAL_CST
2441 || TREE_CODE (val) == FIXED_CST)
2442 list = tree_cons (NULL_TREE, val, list);
2447 return build_vector (TREE_TYPE (rhs), nreverse (list));
2450 if (kind == tcc_reference)
2452 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2453 || TREE_CODE (rhs) == REALPART_EXPR
2454 || TREE_CODE (rhs) == IMAGPART_EXPR)
2455 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2457 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2458 return fold_unary_loc (EXPR_LOCATION (rhs),
2460 TREE_TYPE (rhs), val);
2462 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2463 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2465 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2466 return fold_ternary_loc (EXPR_LOCATION (rhs),
2468 TREE_TYPE (rhs), val,
2469 TREE_OPERAND (rhs, 1),
2470 TREE_OPERAND (rhs, 2));
2472 else if (TREE_CODE (rhs) == MEM_REF
2473 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2475 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2476 if (TREE_CODE (val) == ADDR_EXPR
2477 && is_gimple_min_invariant (val))
2479 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2481 TREE_OPERAND (rhs, 1));
/* Fall back to folding the reference against constant initializers.  */
2486 return fold_const_aggregate_ref_1 (rhs, valueize);
2488 else if (kind == tcc_declaration)
2489 return get_symbol_constant_value (rhs);
2493 case GIMPLE_UNARY_RHS:
2495 /* Handle unary operators that can appear in GIMPLE form.
2496 Note that we know the single operand must be a constant,
2497 so this should almost always return a simplified RHS. */
2498 tree lhs = gimple_assign_lhs (stmt);
2499 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2501 /* Conversions are useless for CCP purposes if they are
2502 value-preserving. Thus the restrictions that
2503 useless_type_conversion_p places for restrict qualification
2504 of pointer types should not apply here.
2505 Substitution later will only substitute to allowed places. */
2506 if (CONVERT_EXPR_CODE_P (subcode)
2507 && POINTER_TYPE_P (TREE_TYPE (lhs))
2508 && POINTER_TYPE_P (TREE_TYPE (op0))
2509 && (TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2510 == TYPE_ADDR_SPACE (TREE_TYPE (op0))))
2514 fold_unary_ignore_overflow_loc (loc, subcode,
2515 gimple_expr_type (stmt), op0);
2518 case GIMPLE_BINARY_RHS:
2520 /* Handle binary operators that can appear in GIMPLE form. */
2521 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2522 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2524 /* Translate &x + CST into an invariant form suitable for
2525 further propagation. */
2526 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2527 && TREE_CODE (op0) == ADDR_EXPR
2528 && TREE_CODE (op1) == INTEGER_CST)
2530 tree off = fold_convert (ptr_type_node, op1);
2531 return build_fold_addr_expr_loc
2533 fold_build2 (MEM_REF,
2534 TREE_TYPE (TREE_TYPE (op0)),
2535 unshare_expr (op0), off));
2538 return fold_binary_loc (loc, subcode,
2539 gimple_expr_type (stmt), op0, op1);
2542 case GIMPLE_TERNARY_RHS:
2544 /* Handle ternary operators that can appear in GIMPLE form. */
2545 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2546 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2547 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2549 /* Fold embedded expressions in ternary codes. */
2550 if ((subcode == COND_EXPR
2551 || subcode == VEC_COND_EXPR)
2552 && COMPARISON_CLASS_P (op0))
2554 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2555 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2556 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2557 TREE_TYPE (op0), op00, op01);
2562 return fold_ternary_loc (loc, subcode,
2563 gimple_expr_type (stmt), op0, op1, op2);
/* Call statements: try constant-folding builtin calls with valueized,
   constant arguments.  */
2575 if (gimple_call_internal_p (stmt))
2576 /* No folding yet for these functions. */
2579 fn = (*valueize) (gimple_call_fn (stmt));
2580 if (TREE_CODE (fn) == ADDR_EXPR
2581 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2582 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2584 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2587 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2588 args[i] = (*valueize) (gimple_call_arg (stmt, i))
2589 call = build_call_array_loc (loc,
2590 gimple_call_return_type (stmt),
2591 fn, gimple_call_num_args (stmt), args);
2592 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2594 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2595 STRIP_NOPS (retval);
2606 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2607 Returns NULL_TREE if folding to a constant is not possible, otherwise
2608 returns a constant according to is_gimple_min_invariant. */
2611 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2613 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
/* Only accept the worker's result when it is a gimple invariant;
   simplified-but-non-constant trees are discarded here.  */
2614 if (res && is_gimple_min_invariant (res))
2620 /* The following set of functions are supposed to fold references using
2621 their constant initializers. */
2623 static tree fold_ctor_reference (tree type, tree ctor,
2624 unsigned HOST_WIDE_INT offset,
2625 unsigned HOST_WIDE_INT size);
2627 /* See if we can find constructor defining value of BASE.
2628 When we know the constructor with constant offset (such as
2629 base is array[40] and we do know constructor of array), then
2630 BIT_OFFSET is adjusted accordingly.
2632 As a special case, return error_mark_node when constructor
2633 is not explicitly available, but it is known to be zero
2634 such as 'static const int a;'. */
/* Find the constructor defining the value of BASE, adjusting *BIT_OFFSET
   for any constant offset folded into the base.  Returns NULL_TREE when
   the constructor cannot be determined, error_mark_node when the object
   is known to be zero-initialized.  */
2636 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2637 tree (*valueize)(tree))
2639 HOST_WIDE_INT bit_offset2, size, max_size;
2640 if (TREE_CODE (base) == MEM_REF)
2642 if (!integer_zerop (TREE_OPERAND (base, 1)))
/* Non-constant MEM_REF offsets cannot be accounted for.  */
2644 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2646 *bit_offset += (mem_ref_offset (base).low
/* Look through SSA names to the invariant address they are known
   to hold, if VALUEIZE can provide one.  */
2651 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2652 base = valueize (TREE_OPERAND (base, 0));
2653 if (!base || TREE_CODE (base) != ADDR_EXPR)
2655 base = TREE_OPERAND (base, 0);
2658 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2659 DECL_INITIAL. If BASE is a nested reference into another
2660 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2661 the inner reference. */
2662 switch (TREE_CODE (base))
2665 if (!const_value_known_p (base))
/* A static/external decl with no initializer is known zero.  */
2670 if (!DECL_INITIAL (base)
2671 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2672 return error_mark_node;
2673 return DECL_INITIAL (base);
2677 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
/* Give up on variable-sized or imprecise inner references.  */
2678 if (max_size == -1 || size != max_size)
2680 *bit_offset += bit_offset2;
2681 return get_base_constructor (base, bit_offset, valueize);
2692 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2693 to the memory at bit OFFSET.
2695 We do only simple job of folding byte accesses. */
/* Fold a single-byte integral read at bit OFFSET/SIZE out of STRING_CST
   CTOR.  Only byte-aligned, byte-sized accesses are handled.  */
2698 fold_string_cst_ctor_reference (tree type, tree ctor,
2699 unsigned HOST_WIDE_INT offset,
2700 unsigned HOST_WIDE_INT size)
2702 if (INTEGRAL_TYPE_P (type)
2703 && (TYPE_MODE (type)
2704 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2705 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2707 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2708 && size == BITS_PER_UNIT
2709 && !(offset % BITS_PER_UNIT))
2711 offset /= BITS_PER_UNIT;
2712 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2713 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2716 const char a[20]="hello";
2719 might lead to offset greater than string length. In this case we
2720 know value is either initialized to 0 or out of bounds. Return 0
2722 return build_zero_cst (type);
2727 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2728 SIZE to the memory at bit OFFSET. */
/* Fold a read of TYPE/SIZE at bit OFFSET out of an array-type
   CONSTRUCTOR CTOR.  Accesses must lie within a single element.  */
2731 fold_array_ctor_reference (tree type, tree ctor,
2732 unsigned HOST_WIDE_INT offset,
2733 unsigned HOST_WIDE_INT size)
2735 unsigned HOST_WIDE_INT cnt;
2737 double_int low_bound, elt_size;
2738 double_int index, max_index;
2739 double_int access_index;
2740 tree domain_type = NULL_TREE;
2741 HOST_WIDE_INT inner_offset;
2743 /* Compute low bound and elt size. */
2744 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2745 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2746 if (domain_type && TYPE_MIN_VALUE (domain_type))
2748 /* Static constructors for variably sized objects makes no sense. */
2749 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2750 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2753 low_bound = double_int_zero;
2754 /* Static constructors for variably sized objects makes no sense. */
2755 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2758 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2761 /* We can handle only constantly sized accesses that are known to not
2762 be larger than size of array element. */
2763 if (!TYPE_SIZE_UNIT (type)
2764 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2765 || double_int_cmp (elt_size,
2766 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
2769 /* Compute the array index we look for. */
2770 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
2771 elt_size, TRUNC_DIV_EXPR);
2772 access_index = double_int_add (access_index, low_bound);
2774 /* And offset within the access. */
2775 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
2777 /* See if the array field is large enough to span whole access. We do not
2778 care to fold accesses spanning multiple array indexes. */
2779 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
2782 index = double_int_sub (low_bound, double_int_one);
2783 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2785 /* Array constructor might explicitly set index, or specify range
2786 or leave index NULL meaning that it is next index after previous
2790 if (TREE_CODE (cfield) == INTEGER_CST)
2791 max_index = index = tree_to_double_int (cfield);
2794 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2795 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2796 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2800 max_index = index = double_int_add (index, double_int_one);
2802 /* Do we have match? */
2803 if (double_int_cmp (access_index, index, 1) >= 0
2804 && double_int_cmp (access_index, max_index, 1) <= 0)
2805 return fold_ctor_reference (type, cval, inner_offset, size);
2807 /* When memory is not explicitly mentioned in constructor,
2808 it is 0 (or out of range). */
2809 return build_zero_cst (type);
2812 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2813 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
/* Fold a read of TYPE/SIZE at bit OFFSET out of a record/union/vector
   CONSTRUCTOR CTOR by locating the field that covers the access.  */
2816 fold_nonarray_ctor_reference (tree type, tree ctor,
2817 unsigned HOST_WIDE_INT offset,
2818 unsigned HOST_WIDE_INT size)
2820 unsigned HOST_WIDE_INT cnt;
2823 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2826 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2827 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2828 tree field_size = DECL_SIZE (cfield);
2829 double_int bitoffset;
2830 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2831 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
2832 double_int bitoffset_end, access_end;
2834 /* Variable sized objects in static constructors makes no sense,
2835 but field_size can be NULL for flexible array members. */
2836 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2837 && TREE_CODE (byte_offset) == INTEGER_CST
2838 && (field_size != NULL_TREE
2839 ? TREE_CODE (field_size) == INTEGER_CST
2840 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2842 /* Compute bit offset of the field. */
2843 bitoffset = double_int_add (tree_to_double_int (field_offset),
2844 double_int_mul (byte_offset_cst,
2845 bits_per_unit_cst));
2846 /* Compute bit offset where the field ends. */
2847 if (field_size != NULL_TREE)
2848 bitoffset_end = double_int_add (bitoffset,
2849 tree_to_double_int (field_size));
2851 bitoffset_end = double_int_zero;
2853 access_end = double_int_add (uhwi_to_double_int (offset),
2854 uhwi_to_double_int (size));
2856 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2857 [BITOFFSET, BITOFFSET_END)? */
2858 if (double_int_cmp (access_end, bitoffset, 0) > 0
2859 && (field_size == NULL_TREE
2860 || double_int_cmp (uhwi_to_double_int (offset),
2861 bitoffset_end, 0) < 0))
2863 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
2865 /* We do have overlap. Now see if field is large enough to
2866 cover the access. Give up for accesses spanning multiple
2868 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
2870 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
2872 return fold_ctor_reference (type, cval,
2873 double_int_to_uhwi (inner_offset), size);
2876 /* When memory is not explicitly mentioned in constructor, it is 0. */
2877 return build_zero_cst (type);
2880 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2881 to the memory at bit OFFSET. */
/* Fold a read of TYPE/SIZE at bit OFFSET out of initializer CTOR,
   dispatching to the string-, array- or record-specific helper.  */
2884 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2885 unsigned HOST_WIDE_INT size)
2889 /* We found the field with exact match. */
2890 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2892 return canonicalize_constructor_val (ctor);
2894 /* We are at the end of walk, see if we can view convert the
2896 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2897 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2898 && operand_equal_p (TYPE_SIZE (type),
2899 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2901 ret = canonicalize_constructor_val (ctor);
2902 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2907 if (TREE_CODE (ctor) == STRING_CST)
2908 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2909 if (TREE_CODE (ctor) == CONSTRUCTOR)
/* Arrays and vectors use index-based elements; everything else is
   handled field-by-field.  */
2912 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2913 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2914 return fold_array_ctor_reference (type, ctor, offset, size);
2916 return fold_nonarray_ctor_reference (type, ctor, offset, size);
2922 /* Return the tree representing the element referenced by T if T is an
2923 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
2924 names using VALUEIZE. Return NULL_TREE otherwise. */
/* Worker: fold reference T against constant initializers, valueizing
   SSA names through VALUEIZE (may be NULL — see the wrapper below).  */
2927 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2929 tree ctor, idx, base;
2930 HOST_WIDE_INT offset, size, max_size;
/* Never fold volatile accesses.  */
2933 if (TREE_THIS_VOLATILE (t))
2936 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2937 return get_symbol_constant_value (t);
2939 tem = fold_read_from_constant_string (t);
2943 switch (TREE_CODE (t))
2946 case ARRAY_RANGE_REF:
2947 /* Constant indexes are handled well by get_base_constructor.
2948 Only special case variable offsets.
2949 FIXME: This code can't handle nested references with variable indexes
2950 (they will be handled only by iteration of ccp). Perhaps we can bring
2951 get_ref_base_and_extent here and make it use a valueize callback. */
2952 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
2954 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
2955 && host_integerp (idx, 0))
2957 tree low_bound, unit_size;
2959 /* If the resulting bit-offset is constant, track it. */
2960 if ((low_bound = array_ref_low_bound (t),
2961 host_integerp (low_bound, 0))
2962 && (unit_size = array_ref_element_size (t),
2963 host_integerp (unit_size, 1)))
2965 offset = TREE_INT_CST_LOW (idx);
2966 offset -= TREE_INT_CST_LOW (low_bound);
2967 offset *= TREE_INT_CST_LOW (unit_size);
2968 offset *= BITS_PER_UNIT;
2970 base = TREE_OPERAND (t, 0);
2971 ctor = get_base_constructor (base, &offset, valueize);
2972 /* Empty constructor. Always fold to 0. */
2973 if (ctor == error_mark_node)
2974 return build_zero_cst (TREE_TYPE (t));
2975 /* Out of bound array access. Value is undefined,
2979 /* We can not determine ctor. */
2982 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
2983 TREE_INT_CST_LOW (unit_size)
2991 case TARGET_MEM_REF:
2993 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
2994 ctor = get_base_constructor (base, &offset, valueize);
2996 /* Empty constructor. Always fold to 0. */
2997 if (ctor == error_mark_node)
2998 return build_zero_cst (TREE_TYPE (t));
2999 /* We do not know precise address. */
3000 if (max_size == -1 || max_size != size)
3002 /* We can not determine ctor. */
3006 /* Out of bound array access. Value is undefined, but don't fold. */
3010 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
/* REALPART_EXPR/IMAGPART_EXPR: fold through a COMPLEX_CST operand.  */
3015 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3016 if (c && TREE_CODE (c) == COMPLEX_CST)
3017 return fold_build1_loc (EXPR_LOCATION (t),
3018 TREE_CODE (t), TREE_TYPE (t), c);
/* Public entry point: fold reference T against constant initializers
   without any SSA-name valueization.  */
3030 fold_const_aggregate_ref (tree t)
3032 return fold_const_aggregate_ref_1 (t, NULL);
3035 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3036 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3037 KNOWN_BINFO carries the binfo describing the true type of
3038 OBJ_TYPE_REF_OBJECT(REF). */
/* Look up the FUNCTION_DECL at slot TOKEN of the vtable described by
   KNOWN_BINFO, by constant-folding a read out of the vtable's
   DECL_INITIAL.  Returns NULL_TREE when the method cannot be found or
   cannot be referenced from the current unit.  */
3041 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3043 unsigned HOST_WIDE_INT offset, size;
3046 v = BINFO_VTABLE (known_binfo);
3047 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
3051 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
/* Fold a constant byte offset applied to the vtable address into
   the bit offset we read at.  */
3053 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3054 v = TREE_OPERAND (v, 0);
3059 if (TREE_CODE (v) != ADDR_EXPR)
3061 v = TREE_OPERAND (v, 0);
/* Only virtual VAR_DECLs with an available initializer can be used.  */
3063 if (TREE_CODE (v) != VAR_DECL
3064 || !DECL_VIRTUAL_P (v)
3065 || !DECL_INITIAL (v)
3066 || DECL_INITIAL (v) == error_mark_node)
3068 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE)
3069 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
3070 offset += token * size;
3071 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
3075 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3076 || TREE_CODE (fn) == FDESC_EXPR);
3077 fn = TREE_OPERAND (fn, 0);
3078 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3080 /* When cgraph node is missing and function is not public, we cannot
3081 devirtualize. This can happen in WHOPR when the actual method
3082 ends up in other partition, because we found devirtualization
3083 possibility too late. */
3084 if (!can_refer_decl_in_current_unit_p (fn))
3090 /* Return true iff VAL is a gimple expression that is known to be
3091 non-negative. Restricted to floating-point inputs. */
3094 gimple_val_nonnegative_real_p (tree val)
3098 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3100 /* Use existing logic for non-gimple trees. */
3101 if (tree_expr_nonnegative_p (val))
3104 if (TREE_CODE (val) != SSA_NAME)
3107 /* Currently we look only at the immediately defining statement
3108 to make this determination, since recursion on defining
3109 statements of operands can lead to quadratic behavior in the
3110 worst case. This is expected to catch almost all occurrences
3111 in practice. It would be possible to implement limited-depth
3112 recursion if important cases are lost. Alternatively, passes
3113 that need this information (such as the pow/powi lowering code
3114 in the cse_sincos pass) could be revised to provide it through
3115 dataflow propagation. */
3117 def_stmt = SSA_NAME_DEF_STMT (val);
3119 if (is_gimple_assign (def_stmt))
3123 /* See fold-const.c:tree_expr_nonnegative_p for additional
3124 cases that could be handled with recursion. */
3126 switch (gimple_assign_rhs_code (def_stmt))
3129 /* Always true for floating-point operands. */
3133 /* True if the two operands are identical (since we are
3134 restricted to floating-point inputs). */
3135 op0 = gimple_assign_rhs1 (def_stmt);
3136 op1 = gimple_assign_rhs2 (def_stmt);
3139 || operand_equal_p (op0, op1, 0))
3146 else if (is_gimple_call (def_stmt))
3148 tree fndecl = gimple_call_fndecl (def_stmt);
3150 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3154 switch (DECL_FUNCTION_CODE (fndecl))
3156 CASE_FLT_FN (BUILT_IN_ACOS):
3157 CASE_FLT_FN (BUILT_IN_ACOSH):
3158 CASE_FLT_FN (BUILT_IN_CABS):
3159 CASE_FLT_FN (BUILT_IN_COSH):
3160 CASE_FLT_FN (BUILT_IN_ERFC):
3161 CASE_FLT_FN (BUILT_IN_EXP):
3162 CASE_FLT_FN (BUILT_IN_EXP10):
3163 CASE_FLT_FN (BUILT_IN_EXP2):
3164 CASE_FLT_FN (BUILT_IN_FABS):
3165 CASE_FLT_FN (BUILT_IN_FDIM):
3166 CASE_FLT_FN (BUILT_IN_HYPOT):
3167 CASE_FLT_FN (BUILT_IN_POW10):
3170 CASE_FLT_FN (BUILT_IN_SQRT):
3171 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3172 nonnegative inputs. */
3173 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3178 CASE_FLT_FN (BUILT_IN_POWI):
3179 /* True if the second argument is an even integer. */
3180 arg1 = gimple_call_arg (def_stmt, 1);
3182 if (TREE_CODE (arg1) == INTEGER_CST
3183 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3188 CASE_FLT_FN (BUILT_IN_POW):
3189 /* True if the second argument is an even integer-valued
3191 arg1 = gimple_call_arg (def_stmt, 1);
3193 if (TREE_CODE (arg1) == REAL_CST)
3198 c = TREE_REAL_CST (arg1);
3199 n = real_to_integer (&c);
3203 REAL_VALUE_TYPE cint;
3204 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3205 if (real_identical (&c, &cint))