1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011, 2012 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
33 #include "gimple-fold.h"
35 /* Return true when DECL can be referenced from current unit.
36 We can get declarations that are not possible to reference for
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to other compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
45 2) In WHOPR mode devirtualization might lead to reference
46 to method that was partitioned elsewhere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
50 3) COMDAT functions referred by external vtables that
51 we devirtualize only during final compilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
/* Return true if DECL (a VAR_DECL or FUNCTION_DECL) may safely be
   referenced from the compilation unit currently being processed; see
   the block comment above for the cases (virtual tables, WHOPR
   devirtualization, COMDAT) in which this can fail.
   NOTE(review): this chunk appears to have lines elided (the embedded
   original line numbers are non-contiguous and several return
   statements/braces are missing); the code below is left byte-identical
   to what is visible.  */
57 can_refer_decl_in_current_unit_p (tree decl)
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
62 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
64 /* External flag is set, so we deal with C++ reference
65 to static object from other file. */
66 if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
67 && TREE_CODE (decl) == VAR_DECL)
69 /* Just be sure it is not big in frontend setting
70 flags incorrectly. Those variables should never
72 gcc_checking_assert (!(vnode = varpool_get_node (decl))
73 || !vnode->finalized);
76 /* When function is public, we always can introduce new reference.
77 Exception are the COMDAT functions where introducing a direct
78 reference imply need to include function body in the current unit. */
79 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
81 /* We are not at ltrans stage; so don't worry about WHOPR.
82 Also when still gimplifying all referred comdat functions will be
84 ??? as observed in PR20991 for already optimized out comdat virtual functions
85 we may not necessarily give up because the copy will be output elsewhere when
86 corresponding vtable is output. */
87 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
89 /* If we already output the function body, we are safe. */
90 if (TREE_ASM_WRITTEN (decl))
92 if (TREE_CODE (decl) == FUNCTION_DECL)
94 node = cgraph_get_node (decl);
95 /* Check that we still have function body and that we didn't take
96 the decision to eliminate offline copy of the function yet.
97 The second is important when devirtualization happens during final
98 compilation stage when making a new reference no longer makes callee
100 if (!node || !node->analyzed || node->global.inlined_to)
103 else if (TREE_CODE (decl) == VAR_DECL)
105 vnode = varpool_get_node (decl);
106 if (!vnode || !vnode->finalized)
112 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
113 acceptable form for is_gimple_min_invariant. */
/* CVAL is a value taken from DECL_INITIAL of a variable.  Canonicalize
   it into a form acceptable to is_gimple_min_invariant: rewrite
   POINTER_PLUS_EXPR of an invariant base into &MEM_REF form, and for
   ADDR_EXPRs reject bases we cannot reference from this unit while
   marking referenced VAR_DECL bases addressable.
   NOTE(review): lines appear to be elided from this chunk (missing
   braces / return paths); code kept byte-identical.  */
116 canonicalize_constructor_val (tree cval)
118 STRIP_USELESS_TYPE_CONVERSION (cval);
119 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
120 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
122 tree ptr = TREE_OPERAND (cval, 0);
123 if (is_gimple_min_invariant (ptr))
124 cval = build1_loc (EXPR_LOCATION (cval),
125 ADDR_EXPR, TREE_TYPE (ptr),
126 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
128 fold_convert (ptr_type_node,
129 TREE_OPERAND (cval, 1))));
131 if (TREE_CODE (cval) == ADDR_EXPR)
133 tree base = get_base_address (TREE_OPERAND (cval, 0));
136 && (TREE_CODE (base) == VAR_DECL
137 || TREE_CODE (base) == FUNCTION_DECL)
138 && !can_refer_decl_in_current_unit_p (base))
140 if (base && TREE_CODE (base) == VAR_DECL)
142 TREE_ADDRESSABLE (base) = 1;
143 if (cfun && gimple_referenced_vars (cfun))
144 add_referenced_var (base);
146 /* Fixup types in global initializers. */
147 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
148 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
153 /* If SYM is a constant variable with known value, return the value.
154 NULL_TREE is returned otherwise. */
/* If SYM is a constant variable with a known value, return that value
   (canonicalized via canonicalize_constructor_val); otherwise NULL_TREE.
   A const integral/float variable with no initializer yields a zero
   constant of its type.
   NOTE(review): chunk has elided lines (conditions without visible
   bodies); code kept byte-identical.  */
157 get_symbol_constant_value (tree sym)
159 if (const_value_known_p (sym))
161 tree val = DECL_INITIAL (sym);
164 val = canonicalize_constructor_val (val);
165 if (val && is_gimple_min_invariant (val))
170 /* Variables declared 'const' without an initializer
171 have zero as the initializer if they may not be
172 overridden at link or run time. */
174 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
175 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
176 return build_zero_cst (TREE_TYPE (sym));
184 /* Subroutine of fold_stmt. We perform several simplifications of the
185 memory reference tree EXPR and make sure to re-gimplify them properly
186 after propagation of constant addresses. IS_LHS is true if the
187 reference is supposed to be an lvalue. */
/* Subroutine of fold_stmt (see comment above).  Simplify the memory
   reference tree EXPR: fold VIEW_CONVERT/REALPART/IMAGPART/BIT_FIELD_REF
   of constants, canonicalize MEM_REF address operands, fold constant
   aggregate reads, fold zero-offset MEM_REFs back to plain reference
   trees, and fold TARGET_MEM_REFs; recurses on itself after each
   rewrite.  IS_LHS is true when EXPR is used as an lvalue.
   NOTE(review): lines are elided in this chunk (e.g. the declaration of
   T and several returns are not visible); code kept byte-identical.  */
190 maybe_fold_reference (tree expr, bool is_lhs)
195 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
196 || TREE_CODE (expr) == REALPART_EXPR
197 || TREE_CODE (expr) == IMAGPART_EXPR)
198 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
199 return fold_unary_loc (EXPR_LOCATION (expr),
202 TREE_OPERAND (expr, 0));
203 else if (TREE_CODE (expr) == BIT_FIELD_REF
204 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
205 return fold_ternary_loc (EXPR_LOCATION (expr),
208 TREE_OPERAND (expr, 0),
209 TREE_OPERAND (expr, 1),
210 TREE_OPERAND (expr, 2));
212 while (handled_component_p (*t))
213 t = &TREE_OPERAND (*t, 0);
215 /* Canonicalize MEM_REFs invariant address operand. Do this first
216 to avoid feeding non-canonical MEM_REFs elsewhere. */
217 if (TREE_CODE (*t) == MEM_REF
218 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
220 bool volatile_p = TREE_THIS_VOLATILE (*t);
221 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
222 TREE_OPERAND (*t, 0),
223 TREE_OPERAND (*t, 1));
226 TREE_THIS_VOLATILE (tem) = volatile_p;
228 tem = maybe_fold_reference (expr, is_lhs);
236 && (result = fold_const_aggregate_ref (expr))
237 && is_gimple_min_invariant (result))
240 /* Fold back MEM_REFs to reference trees. */
241 if (TREE_CODE (*t) == MEM_REF
242 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
243 && integer_zerop (TREE_OPERAND (*t, 1))
244 && (TREE_THIS_VOLATILE (*t)
245 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
246 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
247 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
248 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
249 /* We have to look out here to not drop a required conversion
250 from the rhs to the lhs if is_lhs, but we don't have the
251 rhs here to verify that. Thus require strict type
253 && types_compatible_p (TREE_TYPE (*t),
254 TREE_TYPE (TREE_OPERAND
255 (TREE_OPERAND (*t, 0), 0))))
258 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
259 tem = maybe_fold_reference (expr, is_lhs);
264 else if (TREE_CODE (*t) == TARGET_MEM_REF)
266 tree tem = maybe_fold_tmr (*t);
270 tem = maybe_fold_reference (expr, is_lhs);
281 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
282 replacement rhs for the statement or NULL_TREE if no simplification
283 could be made. It is assumed that the operands have been previously
/* Attempt to fold the assignment statement pointed to by SI (see the
   comment above).  Dispatches on the RHS class: single (references,
   ADDR_EXPRs, vector constructors, decls), unary (with TREE_OVERFLOW
   cleanup on conversions), binary (boolean == / != canonicalization),
   and ternary (COND_EXPR folding).  Returns a replacement RHS or
   NULL_TREE.
   NOTE(review): lines are elided throughout this chunk (missing braces,
   case bodies and returns); code kept byte-identical.  */
287 fold_gimple_assign (gimple_stmt_iterator *si)
289 gimple stmt = gsi_stmt (*si);
290 enum tree_code subcode = gimple_assign_rhs_code (stmt);
291 location_t loc = gimple_location (stmt);
293 tree result = NULL_TREE;
295 switch (get_gimple_rhs_class (subcode))
297 case GIMPLE_SINGLE_RHS:
299 tree rhs = gimple_assign_rhs1 (stmt);
301 if (REFERENCE_CLASS_P (rhs))
302 return maybe_fold_reference (rhs, false);
304 else if (TREE_CODE (rhs) == ADDR_EXPR)
306 tree ref = TREE_OPERAND (rhs, 0);
307 tree tem = maybe_fold_reference (ref, true);
309 && TREE_CODE (tem) == MEM_REF
310 && integer_zerop (TREE_OPERAND (tem, 1)))
311 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
313 result = fold_convert (TREE_TYPE (rhs),
314 build_fold_addr_expr_loc (loc, tem));
315 else if (TREE_CODE (ref) == MEM_REF
316 && integer_zerop (TREE_OPERAND (ref, 1)))
317 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
320 else if (TREE_CODE (rhs) == CONSTRUCTOR
321 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
322 && (CONSTRUCTOR_NELTS (rhs)
323 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
325 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
329 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
330 if (TREE_CODE (val) != INTEGER_CST
331 && TREE_CODE (val) != REAL_CST
332 && TREE_CODE (val) != FIXED_CST)
335 return build_vector_from_ctor (TREE_TYPE (rhs),
336 CONSTRUCTOR_ELTS (rhs));
339 else if (DECL_P (rhs))
340 return unshare_expr (get_symbol_constant_value (rhs));
342 /* If we couldn't fold the RHS, hand over to the generic
344 if (result == NULL_TREE)
347 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
348 that may have been added by fold, and "useless" type
349 conversions that might now be apparent due to propagation. */
350 STRIP_USELESS_TYPE_CONVERSION (result);
352 if (result != rhs && valid_gimple_rhs_p (result))
359 case GIMPLE_UNARY_RHS:
361 tree rhs = gimple_assign_rhs1 (stmt);
363 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
366 /* If the operation was a conversion do _not_ mark a
367 resulting constant with TREE_OVERFLOW if the original
368 constant was not. These conversions have implementation
369 defined behavior and retaining the TREE_OVERFLOW flag
370 here would confuse later passes such as VRP. */
371 if (CONVERT_EXPR_CODE_P (subcode)
372 && TREE_CODE (result) == INTEGER_CST
373 && TREE_CODE (rhs) == INTEGER_CST)
374 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
376 STRIP_USELESS_TYPE_CONVERSION (result);
377 if (valid_gimple_rhs_p (result))
383 case GIMPLE_BINARY_RHS:
384 /* Try to canonicalize for boolean-typed X the comparisons
385 X == 0, X == 1, X != 0, and X != 1. */
386 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
387 || gimple_assign_rhs_code (stmt) == NE_EXPR)
389 tree lhs = gimple_assign_lhs (stmt);
390 tree op1 = gimple_assign_rhs1 (stmt);
391 tree op2 = gimple_assign_rhs2 (stmt);
392 tree type = TREE_TYPE (op1);
394 /* Check whether the comparison operands are of the same boolean
395 type as the result type is.
396 Check that second operand is an integer-constant with value
398 if (TREE_CODE (op2) == INTEGER_CST
399 && (integer_zerop (op2) || integer_onep (op2))
400 && useless_type_conversion_p (TREE_TYPE (lhs), type))
402 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
403 bool is_logical_not = false;
405 /* X == 0 and X != 1 is a logical-not of X
406 X == 1 and X != 0 is X */
407 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
408 || (cmp_code == NE_EXPR && integer_onep (op2)))
409 is_logical_not = true;
411 if (is_logical_not == false)
413 /* Only for one-bit precision typed X the transformation
414 !X -> ~X is valid. */
415 else if (TYPE_PRECISION (type) == 1)
416 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
418 /* Otherwise we use !X -> X ^ 1. */
420 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
421 type, op1, build_int_cst (type, 1));
427 result = fold_binary_loc (loc, subcode,
428 TREE_TYPE (gimple_assign_lhs (stmt)),
429 gimple_assign_rhs1 (stmt),
430 gimple_assign_rhs2 (stmt));
434 STRIP_USELESS_TYPE_CONVERSION (result);
435 if (valid_gimple_rhs_p (result))
440 case GIMPLE_TERNARY_RHS:
441 /* Try to fold a conditional expression. */
442 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
444 tree op0 = gimple_assign_rhs1 (stmt);
447 location_t cond_loc = gimple_location (stmt);
449 if (COMPARISON_CLASS_P (op0))
451 fold_defer_overflow_warnings ();
452 tem = fold_binary_loc (cond_loc,
453 TREE_CODE (op0), TREE_TYPE (op0),
454 TREE_OPERAND (op0, 0),
455 TREE_OPERAND (op0, 1));
456 /* This is actually a conditional expression, not a GIMPLE
457 conditional statement, however, the valid_gimple_rhs_p
458 test still applies. */
459 set = (tem && is_gimple_condexpr (tem)
460 && valid_gimple_rhs_p (tem));
461 fold_undefer_overflow_warnings (set, stmt, 0);
463 else if (is_gimple_min_invariant (op0))
472 result = fold_build3_loc (cond_loc, COND_EXPR,
473 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
474 gimple_assign_rhs2 (stmt),
475 gimple_assign_rhs3 (stmt));
479 result = fold_ternary_loc (loc, subcode,
480 TREE_TYPE (gimple_assign_lhs (stmt)),
481 gimple_assign_rhs1 (stmt),
482 gimple_assign_rhs2 (stmt),
483 gimple_assign_rhs3 (stmt));
487 STRIP_USELESS_TYPE_CONVERSION (result);
488 if (valid_gimple_rhs_p (result))
493 case GIMPLE_INVALID_RHS:
500 /* Attempt to fold a conditional statement. Return true if any changes were
501 made. We only attempt to fold the condition expression, and do not perform
502 any transformation that would require alteration of the cfg. It is
503 assumed that the operands have been previously folded. */
/* Attempt to fold the GIMPLE_COND statement STMT (see comment above):
   fold its comparison, and if the result is a valid GIMPLE condition
   expression, write it back into STMT.
   NOTE(review): the return-type line, NULL check and return statements
   appear to be elided; code kept byte-identical.  */
506 fold_gimple_cond (gimple stmt)
508 tree result = fold_binary_loc (gimple_location (stmt),
509 gimple_cond_code (stmt),
511 gimple_cond_lhs (stmt),
512 gimple_cond_rhs (stmt));
516 STRIP_USELESS_TYPE_CONVERSION (result);
517 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
519 gimple_cond_set_condition_from_tree (stmt, result);
527 /* Convert EXPR into a GIMPLE value suitable for substitution on the
528 RHS of an assignment. Insert the necessary statements before
529 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
530 is replaced. If the call is expected to produces a result, then it
531 is replaced by an assignment of the new RHS to the result variable.
532 If the result is to be ignored, then the call is replaced by a
533 GIMPLE_NOP. A proper VDEF chain is retained by making the first
534 VUSE and the last VDEF of the whole sequence be the same as the replaced
535 statement and using new SSA names for stores in between. */
/* Replace the GIMPLE_CALL at *SI_P with the gimplification of EXPR
   (see the comment above for the full contract, including how the VDEF
   chain is preserved across the new statement sequence).
   NOTE(review): many lines are elided in this chunk (declarations of
   LHS, VDEF, LASTSTORE, LAST, REACHING_VUSE and several braces/returns
   are not visible); code kept byte-identical.  */
538 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
541 gimple stmt, new_stmt;
542 gimple_stmt_iterator i;
543 gimple_seq stmts = gimple_seq_alloc();
544 struct gimplify_ctx gctx;
549 stmt = gsi_stmt (*si_p);
551 gcc_assert (is_gimple_call (stmt));
553 push_gimplify_context (&gctx);
554 gctx.into_ssa = gimple_in_ssa_p (cfun);
556 lhs = gimple_call_lhs (stmt);
557 if (lhs == NULL_TREE)
559 gimplify_and_add (expr, &stmts);
560 /* We can end up with folding a memcpy of an empty class assignment
561 which gets optimized away by C++ gimplification. */
562 if (gimple_seq_empty_p (stmts))
564 pop_gimplify_context (NULL);
565 if (gimple_in_ssa_p (cfun))
567 unlink_stmt_vdef (stmt);
570 gsi_remove (si_p, true);
576 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
577 new_stmt = gimple_build_assign (lhs, tmp);
578 i = gsi_last (stmts);
579 gsi_insert_after_without_update (&i, new_stmt,
580 GSI_CONTINUE_LINKING);
583 pop_gimplify_context (NULL);
585 if (gimple_has_location (stmt))
586 annotate_all_with_location (stmts, gimple_location (stmt));
588 /* First iterate over the replacement statements backward, assigning
589 virtual operands to their defining statements. */
591 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
593 new_stmt = gsi_stmt (i);
594 if ((gimple_assign_single_p (new_stmt)
595 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
596 || (is_gimple_call (new_stmt)
597 && (gimple_call_flags (new_stmt)
598 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
602 vdef = gimple_vdef (stmt);
604 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
605 gimple_set_vdef (new_stmt, vdef);
606 if (vdef && TREE_CODE (vdef) == SSA_NAME)
607 SSA_NAME_DEF_STMT (vdef) = new_stmt;
608 laststore = new_stmt;
612 /* Second iterate over the statements forward, assigning virtual
613 operands to their uses. */
615 reaching_vuse = gimple_vuse (stmt);
616 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
618 /* Do not insert the last stmt in this loop but remember it
619 for replacing the original statement. */
622 gsi_insert_before (si_p, last, GSI_NEW_STMT);
625 new_stmt = gsi_stmt (i);
626 /* The replacement can expose previously unreferenced variables. */
627 if (gimple_in_ssa_p (cfun))
628 find_new_referenced_vars (new_stmt);
629 /* If the new statement possibly has a VUSE, update it with exact SSA
630 name we know will reach this one. */
631 if (gimple_has_mem_ops (new_stmt))
632 gimple_set_vuse (new_stmt, reaching_vuse);
633 gimple_set_modified (new_stmt, true);
634 if (gimple_vdef (new_stmt))
635 reaching_vuse = gimple_vdef (new_stmt);
639 /* If the new sequence does not do a store release the virtual
640 definition of the original statement. */
642 && reaching_vuse == gimple_vuse (stmt))
644 tree vdef = gimple_vdef (stmt);
646 && TREE_CODE (vdef) == SSA_NAME)
648 unlink_stmt_vdef (stmt);
649 release_ssa_name (vdef);
653 /* Finally replace the original statement with the last. */
654 gsi_replace (si_p, last, false);
657 /* Return the string length, maximum string length or maximum value of
659 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
660 is not NULL and, for TYPE == 0, its value is not equal to the length
661 we determine or if we are unable to determine the length or value,
662 return false. VISITED is a bitmap of visited variables.
663 TYPE is 0 if string length should be returned, 1 for maximum string
664 length and 2 for maximum value ARG can have. */
/* Compute string length / max string length / max value of ARG into
   *LENGTH, following SSA use-def chains (see the comment above for the
   TYPE encoding: 0 = length, 1 = max length, 2 = max value).  VISITED
   guards against cycles in the def chain.
   NOTE(review): lines are elided in this chunk (declarations of VAR,
   VAL and DEF_STMT, several returns, and the PHI case label are not
   visible); code kept byte-identical.  */
667 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
672 if (TREE_CODE (arg) != SSA_NAME)
674 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
675 if (TREE_CODE (arg) == ADDR_EXPR
676 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
677 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
679 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
680 if (TREE_CODE (aop0) == INDIRECT_REF
681 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
682 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
683 length, visited, type);
689 if (TREE_CODE (val) != INTEGER_CST
690 || tree_int_cst_sgn (val) < 0)
694 val = c_strlen (arg, 1);
702 if (TREE_CODE (*length) != INTEGER_CST
703 || TREE_CODE (val) != INTEGER_CST)
706 if (tree_int_cst_lt (*length, val))
710 else if (simple_cst_equal (val, *length) != 1)
718 /* If we were already here, break the infinite cycle. */
719 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
723 def_stmt = SSA_NAME_DEF_STMT (var);
725 switch (gimple_code (def_stmt))
728 /* The RHS of the statement defining VAR must either have a
729 constant length or come from another SSA_NAME with a constant
731 if (gimple_assign_single_p (def_stmt)
732 || gimple_assign_unary_nop_p (def_stmt))
734 tree rhs = gimple_assign_rhs1 (def_stmt);
735 return get_maxval_strlen (rhs, length, visited, type);
737 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
739 tree op2 = gimple_assign_rhs2 (def_stmt);
740 tree op3 = gimple_assign_rhs3 (def_stmt);
741 return get_maxval_strlen (op2, length, visited, type)
742 && get_maxval_strlen (op3, length, visited, type);
748 /* All the arguments of the PHI node must have the same constant
752 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
754 tree arg = gimple_phi_arg (def_stmt, i)->def;
756 /* If this PHI has itself as an argument, we cannot
757 determine the string length of this argument. However,
758 if we can find a constant string length for the other
759 PHI args then we can still be sure that this is a
760 constant string length. So be optimistic and just
761 continue with the next argument. */
762 if (arg == gimple_phi_result (def_stmt))
765 if (!get_maxval_strlen (arg, length, visited, type))
777 /* Fold builtin call in statement STMT. Returns a simplified tree.
778 We may return a non-constant expression, including another call
779 to a different function and with different arguments, e.g.,
780 substituting memcpy for strcpy when the string length is known.
781 Note that some builtins expand into inline code that may not
782 be valid in GIMPLE. Callers must take care. */
/* Fold the builtin call STMT (see comment above): try the generic
   folder first, then use string-length dataflow (get_maxval_strlen)
   to simplify strlen/strcpy/strncpy/fputs and the *_chk variants.
   Returns a simplified tree or presumably NULL (the failure returns
   are not visible in this chunk).
   NOTE(review): many lines are elided (declarations of CALLEE, RESULT,
   VAL, A, ARG_IDX, TYPE, NARGS, VISITED, IGNORE, NEW_VAL; case bodies;
   break statements); code kept byte-identical.  */
785 gimple_fold_builtin (gimple stmt)
793 location_t loc = gimple_location (stmt);
795 gcc_assert (is_gimple_call (stmt));
797 ignore = (gimple_call_lhs (stmt) == NULL);
799 /* First try the generic builtin folder. If that succeeds, return the
801 result = fold_call_stmt (stmt, ignore);
809 /* Ignore MD builtins. */
810 callee = gimple_call_fndecl (stmt);
811 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
814 /* Give up for always_inline inline builtins until they are
816 if (avoid_folding_inline_builtin (callee))
819 /* If the builtin could not be folded, and it has no argument list,
821 nargs = gimple_call_num_args (stmt);
825 /* Limit the work only for builtins we know how to simplify. */
826 switch (DECL_FUNCTION_CODE (callee))
828 case BUILT_IN_STRLEN:
830 case BUILT_IN_FPUTS_UNLOCKED:
834 case BUILT_IN_STRCPY:
835 case BUILT_IN_STRNCPY:
839 case BUILT_IN_MEMCPY_CHK:
840 case BUILT_IN_MEMPCPY_CHK:
841 case BUILT_IN_MEMMOVE_CHK:
842 case BUILT_IN_MEMSET_CHK:
843 case BUILT_IN_STRNCPY_CHK:
844 case BUILT_IN_STPNCPY_CHK:
848 case BUILT_IN_STRCPY_CHK:
849 case BUILT_IN_STPCPY_CHK:
853 case BUILT_IN_SNPRINTF_CHK:
854 case BUILT_IN_VSNPRINTF_CHK:
862 if (arg_idx >= nargs)
865 /* Try to use the dataflow information gathered by the CCP process. */
866 visited = BITMAP_ALLOC (NULL);
867 bitmap_clear (visited);
869 memset (val, 0, sizeof (val));
870 a = gimple_call_arg (stmt, arg_idx);
871 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
872 val[arg_idx] = NULL_TREE;
874 BITMAP_FREE (visited);
877 switch (DECL_FUNCTION_CODE (callee))
879 case BUILT_IN_STRLEN:
880 if (val[0] && nargs == 1)
883 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
885 /* If the result is not a valid gimple value, or not a cast
886 of a valid gimple value, then we cannot use the result. */
887 if (is_gimple_val (new_val)
888 || (CONVERT_EXPR_P (new_val)
889 && is_gimple_val (TREE_OPERAND (new_val, 0))))
894 case BUILT_IN_STRCPY:
895 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
896 result = fold_builtin_strcpy (loc, callee,
897 gimple_call_arg (stmt, 0),
898 gimple_call_arg (stmt, 1),
902 case BUILT_IN_STRNCPY:
903 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
904 result = fold_builtin_strncpy (loc, callee,
905 gimple_call_arg (stmt, 0),
906 gimple_call_arg (stmt, 1),
907 gimple_call_arg (stmt, 2),
913 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
914 gimple_call_arg (stmt, 1),
915 ignore, false, val[0]);
918 case BUILT_IN_FPUTS_UNLOCKED:
920 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
921 gimple_call_arg (stmt, 1),
922 ignore, true, val[0]);
925 case BUILT_IN_MEMCPY_CHK:
926 case BUILT_IN_MEMPCPY_CHK:
927 case BUILT_IN_MEMMOVE_CHK:
928 case BUILT_IN_MEMSET_CHK:
929 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
930 result = fold_builtin_memory_chk (loc, callee,
931 gimple_call_arg (stmt, 0),
932 gimple_call_arg (stmt, 1),
933 gimple_call_arg (stmt, 2),
934 gimple_call_arg (stmt, 3),
936 DECL_FUNCTION_CODE (callee));
939 case BUILT_IN_STRCPY_CHK:
940 case BUILT_IN_STPCPY_CHK:
941 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
942 result = fold_builtin_stxcpy_chk (loc, callee,
943 gimple_call_arg (stmt, 0),
944 gimple_call_arg (stmt, 1),
945 gimple_call_arg (stmt, 2),
947 DECL_FUNCTION_CODE (callee));
950 case BUILT_IN_STRNCPY_CHK:
951 case BUILT_IN_STPNCPY_CHK:
952 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
953 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
954 gimple_call_arg (stmt, 1),
955 gimple_call_arg (stmt, 2),
956 gimple_call_arg (stmt, 3),
958 DECL_FUNCTION_CODE (callee));
961 case BUILT_IN_SNPRINTF_CHK:
962 case BUILT_IN_VSNPRINTF_CHK:
963 if (val[1] && is_gimple_val (val[1]))
964 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
965 DECL_FUNCTION_CODE (callee));
972 if (result && ignore)
973 result = fold_ignored_result (result);
977 /* Generate code adjusting the first parameter of a call statement determined
/* Adjust the first (`this') argument of the call at *GSI by DELTA:
   build TMP = PARM + DELTA (POINTER_PLUS_EXPR) before the call and make
   TMP the new first argument.
   NOTE(review): declarations of PARM, TMP and NEW_STMT are not visible
   in this chunk (lines elided); code kept byte-identical.  */
981 gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
983 gimple call_stmt = gsi_stmt (*gsi);
987 delta = convert_to_ptrofftype (delta);
988 gcc_assert (gimple_call_num_args (call_stmt) >= 1);
989 parm = gimple_call_arg (call_stmt, 0);
990 gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
991 tmp = create_tmp_var (TREE_TYPE (parm), NULL);
992 add_referenced_var (tmp);
994 tmp = make_ssa_name (tmp, NULL);
995 new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
996 SSA_NAME_DEF_STMT (tmp) = new_stmt;
997 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
998 gimple_call_set_arg (call_stmt, 0, tmp);
1001 /* Return a binfo to be used for devirtualization of calls based on an object
1002 represented by a declaration (i.e. a global or automatically allocated one)
1003 or NULL if it cannot be found or is not safe. CST is expected to be an
1004 ADDR_EXPR of such object or the function will return NULL. Currently it is
1005 safe to use such binfo only if it has no base binfo (i.e. no ancestors). */
/* Return a BINFO usable for devirtualization of calls on the object
   whose address is CST (see the comment above for the safety
   conditions: no base binfos, non-artificial sub-object).
   NOTE(review): lines are elided (the FLD declaration, loop structure
   around the sub-object walk, and return statements are not visible);
   code kept byte-identical.  */
1008 gimple_extract_devirt_binfo_from_cst (tree cst)
1010 HOST_WIDE_INT offset, size, max_size;
1011 tree base, type, expected_type, binfo;
1012 bool last_artificial = false;
1014 if (!flag_devirtualize
1015 || TREE_CODE (cst) != ADDR_EXPR
1016 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1019 cst = TREE_OPERAND (cst, 0);
1020 expected_type = TREE_TYPE (cst);
1021 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1022 type = TREE_TYPE (base);
1026 || TREE_CODE (type) != RECORD_TYPE)
1029 /* Find the sub-object the constant actually refers to and mark whether it is
1030 an artificial one (as opposed to a user-defined one). */
1033 HOST_WIDE_INT pos, size;
1036 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
1041 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1043 if (TREE_CODE (fld) != FIELD_DECL)
1046 pos = int_bit_position (fld);
1047 size = tree_low_cst (DECL_SIZE (fld), 1);
1048 if (pos <= offset && (pos + size) > offset)
1051 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1054 last_artificial = DECL_ARTIFICIAL (fld);
1055 type = TREE_TYPE (fld);
1058 /* Artificial sub-objects are ancestors, we do not want to use them for
1059 devirtualization, at least not here. */
1060 if (last_artificial)
1062 binfo = TYPE_BINFO (type);
1063 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1069 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1070 The statement may be replaced by another statement, e.g., if the call
1071 simplifies to a constant value. Return true if any changes were made.
1072 It is assumed that the operands have been previously folded. */
/* Attempt to fold the call at GSI (see comment above): fold *& in call
   arguments, convert OBJ_TYPE_REF virtual calls that became direct
   (including devirtualization via gimple_extract_devirt_binfo_from_cst),
   and fold builtins via gimple_fold_builtin.  Returns true if anything
   changed.
   NOTE(review): lines are elided (declarations of CALLEE and TOKEN,
   several braces, and the INPLACE handling/returns are not visible);
   code kept byte-identical.  */
1075 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1077 gimple stmt = gsi_stmt (*gsi);
1079 bool changed = false;
1082 /* Fold *& in call arguments. */
1083 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1084 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1086 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1089 gimple_call_set_arg (stmt, i, tmp);
1094 /* Check for virtual calls that became direct calls. */
1095 callee = gimple_call_fn (stmt);
1096 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1098 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1100 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1105 tree obj = OBJ_TYPE_REF_OBJECT (callee);
1106 tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
1110 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1111 tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1114 gimple_call_set_fndecl (stmt, fndecl);
1124 /* Check for builtins that CCP can handle using information not
1125 available in the generic fold routines. */
1126 callee = gimple_call_fndecl (stmt);
1127 if (callee && DECL_BUILT_IN (callee))
1129 tree result = gimple_fold_builtin (stmt);
1132 if (!update_call_from_tree (gsi, result))
1133 gimplify_and_update_call_from_tree (gsi, result);
1141 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1142 distinguishes both cases. */
/* Worker for fold_stmt and fold_stmt_inplace; INPLACE distinguishes the
   two (see comment above).  Dispatches on the statement code: assigns
   (canonicalize operand order, fold the RHS), conds, calls, asms
   (fold *& in operands), and debug binds; finally folds *& on the LHS.
   Returns whether anything changed.
   NOTE(review): lines are elided throughout (declarations of NEXT_STMT,
   NEW_RHS, I, NOUTPUTS; case labels for GIMPLE_COND/CALL/ASM/DEBUG;
   closing braces and the final return are not visible); code kept
   byte-identical.  */
1145 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1147 bool changed = false;
1148 gimple stmt = gsi_stmt (*gsi);
1150 gimple_stmt_iterator gsinext = *gsi;
1153 gsi_next (&gsinext);
1154 next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
1156 /* Fold the main computation performed by the statement. */
1157 switch (gimple_code (stmt))
1161 unsigned old_num_ops = gimple_num_ops (stmt);
1162 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1163 tree lhs = gimple_assign_lhs (stmt);
1165 /* First canonicalize operand order. This avoids building new
1166 trees if this is the only thing fold would later do. */
1167 if ((commutative_tree_code (subcode)
1168 || commutative_ternary_tree_code (subcode))
1169 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1170 gimple_assign_rhs2 (stmt), false))
1172 tree tem = gimple_assign_rhs1 (stmt);
1173 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1174 gimple_assign_set_rhs2 (stmt, tem);
1177 new_rhs = fold_gimple_assign (gsi);
1179 && !useless_type_conversion_p (TREE_TYPE (lhs),
1180 TREE_TYPE (new_rhs)))
1181 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1184 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1186 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1193 changed |= fold_gimple_cond (stmt);
1197 changed |= gimple_fold_call (gsi, inplace);
1201 /* Fold *& in asm operands. */
1204 const char **oconstraints;
1205 const char *constraint;
1206 bool allows_mem, allows_reg;
1208 noutputs = gimple_asm_noutputs (stmt);
1209 oconstraints = XALLOCAVEC (const char *, noutputs);
1211 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1213 tree link = gimple_asm_output_op (stmt, i);
1214 tree op = TREE_VALUE (link);
1216 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1217 if (REFERENCE_CLASS_P (op)
1218 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1220 TREE_VALUE (link) = op;
1224 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1226 tree link = gimple_asm_input_op (stmt, i);
1227 tree op = TREE_VALUE (link);
1229 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1230 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1231 oconstraints, &allows_mem, &allows_reg);
1232 if (REFERENCE_CLASS_P (op)
1233 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1236 TREE_VALUE (link) = op;
1244 if (gimple_debug_bind_p (stmt))
1246 tree val = gimple_debug_bind_get_value (stmt);
1248 && REFERENCE_CLASS_P (val))
1250 tree tem = maybe_fold_reference (val, false);
1253 gimple_debug_bind_set_value (stmt, tem);
1258 && TREE_CODE (val) == ADDR_EXPR)
1260 tree ref = TREE_OPERAND (val, 0);
1261 tree tem = maybe_fold_reference (ref, false);
1264 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
1265 gimple_debug_bind_set_value (stmt, tem);
1275 /* If stmt folds into nothing and it was the last stmt in a bb,
1276 don't call gsi_stmt. */
1277 if (gsi_end_p (*gsi))
1279 gcc_assert (next_stmt == NULL);
1283 stmt = gsi_stmt (*gsi);
1285 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1286 as we'd be changing the next stmt. */
1287 if (gimple_has_lhs (stmt) && stmt != next_stmt)
1289 tree lhs = gimple_get_lhs (stmt);
1290 if (lhs && REFERENCE_CLASS_P (lhs))
1292 tree new_lhs = maybe_fold_reference (lhs, true);
1295 gimple_set_lhs (stmt, new_lhs);
1304 /* Fold the statement pointed to by GSI. In some cases, this function may
1305 replace the whole statement with a new one. Returns true iff folding
1307 The statement pointed to by GSI should be in valid gimple form but may
1308 be in unfolded state as resulting from for example constant propagation
1309 which can produce *&x = 0. */
1312 fold_stmt (gimple_stmt_iterator *gsi)
  /* Delegate to the worker with inplace == false, i.e. the worker is
     allowed to replace the statement with a different one.  */
1314 return fold_stmt_1 (gsi, false);
1317 /* Perform the minimal folding on statement *GSI. Only operations like
1318 *&x created by constant propagation are handled. The statement cannot
1319 be replaced with a new one. Return true if the statement was
1320 changed, false otherwise.
1321 The statement *GSI should be in valid gimple form but may
1322 be in unfolded state as resulting from for example constant propagation
1323 which can produce *&x = 0. */
1326 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1328 gimple stmt = gsi_stmt (*gsi);
1329 bool changed = fold_stmt_1 (gsi, true);
  /* Folding with inplace == true must never replace the statement, so the
     iterator has to still point at the original statement afterwards.  */
1330 gcc_assert (gsi_stmt (*gsi) == stmt);
1334 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1335 if EXPR is null or we don't know how.
1336 If non-null, the result always has boolean type. */
1339 canonicalize_bool (tree expr, bool invert)
  /* Inverted case: constants flip, an SSA name becomes (expr == 0), and a
     comparison is replaced by its logical inverse.  */
1345 if (integer_nonzerop (expr))
1346 return boolean_false_node;
1347 else if (integer_zerop (expr))
1348 return boolean_true_node;
1349 else if (TREE_CODE (expr) == SSA_NAME)
1350 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1351 build_int_cst (TREE_TYPE (expr), 0));
1352 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1353 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1355 TREE_OPERAND (expr, 0),
1356 TREE_OPERAND (expr, 1));
  /* Non-inverted case.  */
1362 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1364 if (integer_nonzerop (expr))
1365 return boolean_true_node;
1366 else if (integer_zerop (expr))
1367 return boolean_false_node;
1368 else if (TREE_CODE (expr) == SSA_NAME)
1369 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1370 build_int_cst (TREE_TYPE (expr), 0));
1371 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1372 return fold_build2 (TREE_CODE (expr),
1374 TREE_OPERAND (expr, 0),
1375 TREE_OPERAND (expr, 1));
1381 /* Check to see if a boolean expression EXPR is logically equivalent to the
1382 comparison (OP1 CODE OP2). Check for various identities involving
SSA_NAME boolean variables and their defining statements.
1386 same_bool_comparison_p (const_tree expr, enum tree_code code,
1387 const_tree op1, const_tree op2)
1391 /* The obvious case. */
1392 if (TREE_CODE (expr) == code
1393 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1394 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1397 /* Check for comparing (name, name != 0) and the case where expr
1398 is an SSA_NAME with a definition matching the comparison. */
1399 if (TREE_CODE (expr) == SSA_NAME
1400 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1402 if (operand_equal_p (expr, op1, 0))
1403 return ((code == NE_EXPR && integer_zerop (op2))
1404 || (code == EQ_EXPR && integer_nonzerop (op2)));
1405 s = SSA_NAME_DEF_STMT (expr);
1406 if (is_gimple_assign (s)
1407 && gimple_assign_rhs_code (s) == code
1408 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1409 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1413 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1414 of name is a comparison, recurse. */
1415 if (TREE_CODE (op1) == SSA_NAME
1416 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1418 s = SSA_NAME_DEF_STMT (op1);
1419 if (is_gimple_assign (s)
1420 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1422 enum tree_code c = gimple_assign_rhs_code (s);
  /* (name != 0) or (name == 1): name's truth value equals the comparison.  */
1423 if ((c == NE_EXPR && integer_zerop (op2))
1424 || (c == EQ_EXPR && integer_nonzerop (op2)))
1425 return same_bool_comparison_p (expr, c,
1426 gimple_assign_rhs1 (s),
1427 gimple_assign_rhs2 (s));
  /* (name == 0) or (name != 1): name's truth value is the inverse.  */
1428 if ((c == EQ_EXPR && integer_zerop (op2))
1429 || (c == NE_EXPR && integer_nonzerop (op2)))
1430 return same_bool_comparison_p (expr,
1431 invert_tree_comparison (c, false),
1432 gimple_assign_rhs1 (s),
1433 gimple_assign_rhs2 (s));
1439 /* Check to see if two boolean expressions OP1 and OP2 are logically
equivalent.
1443 same_bool_result_p (const_tree op1, const_tree op2)
1445 /* Simple cases first. */
1446 if (operand_equal_p (op1, op2, 0))
1449 /* Check the cases where at least one of the operands is a comparison.
1450 These are a bit smarter than operand_equal_p in that they apply some
1451 identities on SSA_NAMEs. */
1452 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1453 && same_bool_comparison_p (op1, TREE_CODE (op2),
1454 TREE_OPERAND (op2, 0),
1455 TREE_OPERAND (op2, 1)))
1457 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1458 && same_bool_comparison_p (op2, TREE_CODE (op1),
1459 TREE_OPERAND (op1, 0),
1460 TREE_OPERAND (op1, 1)))
1467 /* Forward declarations for some mutually recursive functions. The and_*
and or_* helpers recurse into each other (e.g. via DeMorgan rewrites),
so all six prototypes are needed up front. */
1470 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1471 enum tree_code code2, tree op2a, tree op2b);
1473 and_var_with_comparison (tree var, bool invert,
1474 enum tree_code code2, tree op2a, tree op2b);
1476 and_var_with_comparison_1 (gimple stmt,
1477 enum tree_code code2, tree op2a, tree op2b);
1479 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1480 enum tree_code code2, tree op2a, tree op2b);
1482 or_var_with_comparison (tree var, bool invert,
1483 enum tree_code code2, tree op2a, tree op2b);
1485 or_var_with_comparison_1 (gimple stmt,
1486 enum tree_code code2, tree op2a, tree op2b);
1488 /* Helper function for and_comparisons_1: try to simplify the AND of the
1489 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1490 If INVERT is true, invert the value of the VAR before doing the AND.
1491 Return NULL_EXPR if we can't simplify this to a single expression. */
1494 and_var_with_comparison (tree var, bool invert,
1495 enum tree_code code2, tree op2a, tree op2b)
1498 gimple stmt = SSA_NAME_DEF_STMT (var);
1500 /* We can only deal with variables whose definitions are assignments. */
1501 if (!is_gimple_assign (stmt))
1504 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1505 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1506 Then we only have to consider the simpler non-inverted cases. */
1508 t = or_var_with_comparison_1 (stmt,
1509 invert_tree_comparison (code2, false),
1512 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  /* canonicalize_bool applies the outer negation required by DeMorgan
     when INVERT is set, and forces the result to boolean type.  */
1513 return canonicalize_bool (t, invert);
1516 /* Try to simplify the AND of the ssa variable defined by the assignment
1517 STMT with the comparison specified by (OP2A CODE2 OP2B).
1518 Return NULL_EXPR if we can't simplify this to a single expression. */
1521 and_var_with_comparison_1 (gimple stmt,
1522 enum tree_code code2, tree op2a, tree op2b)
1524 tree var = gimple_assign_lhs (stmt);
1525 tree true_test_var = NULL_TREE;
1526 tree false_test_var = NULL_TREE;
1527 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1529 /* Check for identities like (var AND (var == 0)) => false. */
1530 if (TREE_CODE (op2a) == SSA_NAME
1531 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
  /* (op2a != 0) / (op2a == 1): the comparison tests op2a's truth.  */
1533 if ((code2 == NE_EXPR && integer_zerop (op2b))
1534 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1536 true_test_var = op2a;
1537 if (var == true_test_var)
  /* (op2a == 0) / (op2a != 1): the comparison tests op2a's falsehood.  */
1540 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1541 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1543 false_test_var = op2a;
1544 if (var == false_test_var)
1545 return boolean_false_node;
1549 /* If the definition is a comparison, recurse on it. */
1550 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1552 tree t = and_comparisons_1 (innercode,
1553 gimple_assign_rhs1 (stmt),
1554 gimple_assign_rhs2 (stmt),
1562 /* If the definition is an AND or OR expression, we may be able to
1563 simplify by reassociating. */
1564 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1565 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1567 tree inner1 = gimple_assign_rhs1 (stmt);
1568 tree inner2 = gimple_assign_rhs2 (stmt);
1571 tree partial = NULL_TREE;
1572 bool is_and = (innercode == BIT_AND_EXPR);
1574 /* Check for boolean identities that don't require recursive examination
1576 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1577 inner1 AND (inner1 OR inner2) => inner1
1578 !inner1 AND (inner1 AND inner2) => false
1579 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1580 Likewise for similar cases involving inner2. */
1581 if (inner1 == true_test_var)
1582 return (is_and ? var : inner1);
1583 else if (inner2 == true_test_var)
1584 return (is_and ? var : inner2);
1585 else if (inner1 == false_test_var)
1587 ? boolean_false_node
1588 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1589 else if (inner2 == false_test_var)
1591 ? boolean_false_node
1592 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1594 /* Next, redistribute/reassociate the AND across the inner tests.
1595 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1596 if (TREE_CODE (inner1) == SSA_NAME
1597 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1598 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1599 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1600 gimple_assign_rhs1 (s),
1601 gimple_assign_rhs2 (s),
1602 code2, op2a, op2b)))
1604 /* Handle the AND case, where we are reassociating:
1605 (inner1 AND inner2) AND (op2a code2 op2b)
1607 If the partial result t is a constant, we win. Otherwise
1608 continue on to try reassociating with the other inner test. */
1611 if (integer_onep (t))
1613 else if (integer_zerop (t))
1614 return boolean_false_node;
1617 /* Handle the OR case, where we are redistributing:
1618 (inner1 OR inner2) AND (op2a code2 op2b)
1619 => (t OR (inner2 AND (op2a code2 op2b))) */
1620 else if (integer_onep (t))
1621 return boolean_true_node;
1623 /* Save partial result for later. */
1627 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1628 if (TREE_CODE (inner2) == SSA_NAME
1629 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1630 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1631 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1632 gimple_assign_rhs1 (s),
1633 gimple_assign_rhs2 (s),
1634 code2, op2a, op2b)))
1636 /* Handle the AND case, where we are reassociating:
1637 (inner1 AND inner2) AND (op2a code2 op2b)
1638 => (inner1 AND t) */
1641 if (integer_onep (t))
1643 else if (integer_zerop (t))
1644 return boolean_false_node;
1645 /* If both are the same, we can apply the identity
1647 else if (partial && same_bool_result_p (t, partial))
1651 /* Handle the OR case, where we are redistributing:
1652 (inner1 OR inner2) AND (op2a code2 op2b)
1653 => (t OR (inner1 AND (op2a code2 op2b)))
1654 => (t OR partial) */
1657 if (integer_onep (t))
1658 return boolean_true_node;
1661 /* We already got a simplification for the other
1662 operand to the redistributed OR expression. The
1663 interesting case is when at least one is false.
1664 Or, if both are the same, we can apply the identity
1666 if (integer_zerop (partial))
1668 else if (integer_zerop (t))
1670 else if (same_bool_result_p (t, partial))
1679 /* Try to simplify the AND of two comparisons defined by
1680 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1681 If this can be done without constructing an intermediate value,
1682 return the resulting tree; otherwise NULL_TREE is returned.
1683 This function is deliberately asymmetric as it recurses on SSA_DEFs
1684 in the first comparison but not the second. */
1687 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1688 enum tree_code code2, tree op2a, tree op2b)
1690 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1691 if (operand_equal_p (op1a, op2a, 0)
1692 && operand_equal_p (op1b, op2b, 0))
1694 /* Result will be either NULL_TREE, or a combined comparison. */
1695 tree t = combine_comparisons (UNKNOWN_LOCATION,
1696 TRUTH_ANDIF_EXPR, code1, code2,
1697 boolean_type_node, op1a, op1b);
1702 /* Likewise the swapped case of the above. */
1703 if (operand_equal_p (op1a, op2b, 0)
1704 && operand_equal_p (op1b, op2a, 0))
1706 /* Result will be either NULL_TREE, or a combined comparison. */
1707 tree t = combine_comparisons (UNKNOWN_LOCATION,
1708 TRUTH_ANDIF_EXPR, code1,
1709 swap_tree_comparison (code2),
1710 boolean_type_node, op1a, op1b);
1715 /* If both comparisons are of the same value against constants, we might
1716 be able to merge them. */
1717 if (operand_equal_p (op1a, op2a, 0)
1718 && TREE_CODE (op1b) == INTEGER_CST
1719 && TREE_CODE (op2b) == INTEGER_CST)
1721 int cmp = tree_int_cst_compare (op1b, op2b);
1723 /* If we have (op1a == op1b), we should either be able to
1724 return that or FALSE, depending on whether the constant op1b
1725 also satisfies the other comparison against op2b. */
1726 if (code1 == EQ_EXPR)
1732 case EQ_EXPR: val = (cmp == 0); break;
1733 case NE_EXPR: val = (cmp != 0); break;
1734 case LT_EXPR: val = (cmp < 0); break;
1735 case GT_EXPR: val = (cmp > 0); break;
1736 case LE_EXPR: val = (cmp <= 0); break;
1737 case GE_EXPR: val = (cmp >= 0); break;
1738 default: done = false;
1743 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1745 return boolean_false_node;
1748 /* Likewise if the second comparison is an == comparison. */
1749 else if (code2 == EQ_EXPR)
1755 case EQ_EXPR: val = (cmp == 0); break;
1756 case NE_EXPR: val = (cmp != 0); break;
1757 case LT_EXPR: val = (cmp > 0); break;
1758 case GT_EXPR: val = (cmp < 0); break;
1759 case LE_EXPR: val = (cmp >= 0); break;
1760 case GE_EXPR: val = (cmp <= 0); break;
1761 default: done = false;
1766 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1768 return boolean_false_node;
1772 /* Same business with inequality tests. */
1773 else if (code1 == NE_EXPR)
1778 case EQ_EXPR: val = (cmp != 0); break;
1779 case NE_EXPR: val = (cmp == 0); break;
1780 case LT_EXPR: val = (cmp >= 0); break;
1781 case GT_EXPR: val = (cmp <= 0); break;
1782 case LE_EXPR: val = (cmp > 0); break;
1783 case GE_EXPR: val = (cmp < 0); break;
1788 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1790 else if (code2 == NE_EXPR)
1795 case EQ_EXPR: val = (cmp == 0); break;
1796 case NE_EXPR: val = (cmp != 0); break;
1797 case LT_EXPR: val = (cmp <= 0); break;
1798 case GT_EXPR: val = (cmp >= 0); break;
1799 case LE_EXPR: val = (cmp < 0); break;
1800 case GE_EXPR: val = (cmp > 0); break;
1805 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1808 /* Choose the more restrictive of two < or <= comparisons. */
1809 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1810 && (code2 == LT_EXPR || code2 == LE_EXPR))
1812 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1813 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1815 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1818 /* Likewise choose the more restrictive of two > or >= comparisons. */
1819 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1820 && (code2 == GT_EXPR || code2 == GE_EXPR))
1822 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1823 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1825 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1828 /* Check for singleton ranges. */
1830 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1831 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1832 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1834 /* Check for disjoint ranges. */
1836 && (code1 == LT_EXPR || code1 == LE_EXPR)
1837 && (code2 == GT_EXPR || code2 == GE_EXPR))
1838 return boolean_false_node;
1840 && (code1 == GT_EXPR || code1 == GE_EXPR)
1841 && (code2 == LT_EXPR || code2 == LE_EXPR))
1842 return boolean_false_node;
1845 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1846 NAME's definition is a truth value. See if there are any simplifications
1847 that can be done against the NAME's definition. */
1848 if (TREE_CODE (op1a) == SSA_NAME
1849 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1850 && (integer_zerop (op1b) || integer_onep (op1b)))
1852 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1853 || (code1 == NE_EXPR && integer_onep (op1b)));
1854 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1855 switch (gimple_code (stmt))
1858 /* Try to simplify by copy-propagating the definition. */
1859 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1862 /* If every argument to the PHI produces the same result when
1863 ANDed with the second comparison, we win.
1864 Do not do this unless the type is bool since we need a bool
1865 result here anyway. */
1866 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1868 tree result = NULL_TREE;
1870 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1872 tree arg = gimple_phi_arg_def (stmt, i);
1874 /* If this PHI has itself as an argument, ignore it.
1875 If all the other args produce the same result,
1877 if (arg == gimple_phi_result (stmt))
1879 else if (TREE_CODE (arg) == INTEGER_CST)
1881 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1884 result = boolean_false_node;
1885 else if (!integer_zerop (result))
1889 result = fold_build2 (code2, boolean_type_node,
1891 else if (!same_bool_comparison_p (result,
1895 else if (TREE_CODE (arg) == SSA_NAME
1896 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1899 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1900 /* In simple cases we can look through PHI nodes,
1901 but we have to be careful with loops.
1903 if (! dom_info_available_p (CDI_DOMINATORS)
1904 || gimple_bb (def_stmt) == gimple_bb (stmt)
1905 || dominated_by_p (CDI_DOMINATORS,
1906 gimple_bb (def_stmt),
1909 temp = and_var_with_comparison (arg, invert, code2,
1915 else if (!same_bool_result_p (result, temp))
1931 /* Try to simplify the AND of two comparisons, specified by
1932 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1933 If this can be simplified to a single expression (without requiring
1934 introducing more SSA variables to hold intermediate values),
1935 return the resulting tree. Otherwise return NULL_TREE.
1936 If the result expression is non-null, it has boolean type. */
1939 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1940 enum tree_code code2, tree op2a, tree op2b)
1942 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  /* and_comparisons_1 only recurses on SSA defs of the first comparison,
     so retry with the two comparisons swapped before giving up.  */
1946 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1949 /* Helper function for or_comparisons_1: try to simplify the OR of the
1950 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1951 If INVERT is true, invert the value of VAR before doing the OR.
1952 Return NULL_EXPR if we can't simplify this to a single expression. */
1955 or_var_with_comparison (tree var, bool invert,
1956 enum tree_code code2, tree op2a, tree op2b)
1959 gimple stmt = SSA_NAME_DEF_STMT (var);
1961 /* We can only deal with variables whose definitions are assignments. */
1962 if (!is_gimple_assign (stmt))
1965 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1966 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1967 Then we only have to consider the simpler non-inverted cases. */
1969 t = and_var_with_comparison_1 (stmt,
1970 invert_tree_comparison (code2, false),
1973 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  /* canonicalize_bool applies the outer negation required by DeMorgan
     when INVERT is set, and forces the result to boolean type.  */
1974 return canonicalize_bool (t, invert);
1977 /* Try to simplify the OR of the ssa variable defined by the assignment
1978 STMT with the comparison specified by (OP2A CODE2 OP2B).
1979 Return NULL_EXPR if we can't simplify this to a single expression. */
1982 or_var_with_comparison_1 (gimple stmt,
1983 enum tree_code code2, tree op2a, tree op2b)
1985 tree var = gimple_assign_lhs (stmt);
1986 tree true_test_var = NULL_TREE;
1987 tree false_test_var = NULL_TREE;
1988 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1990 /* Check for identities like (var OR (var != 0)) => true. */
1991 if (TREE_CODE (op2a) == SSA_NAME
1992 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1994 if ((code2 == NE_EXPR && integer_zerop (op2b))
1995 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1997 true_test_var = op2a;
1998 if (var == true_test_var)
2001 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2002 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2004 false_test_var = op2a;
2005 if (var == false_test_var)
2006 return boolean_true_node;
2010 /* If the definition is a comparison, recurse on it. */
2011 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2013 tree t = or_comparisons_1 (innercode,
2014 gimple_assign_rhs1 (stmt),
2015 gimple_assign_rhs2 (stmt),
2023 /* If the definition is an AND or OR expression, we may be able to
2024 simplify by reassociating. */
2025 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2026 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2028 tree inner1 = gimple_assign_rhs1 (stmt);
2029 tree inner2 = gimple_assign_rhs2 (stmt);
2032 tree partial = NULL_TREE;
2033 bool is_or = (innercode == BIT_IOR_EXPR);
  /* These identities mirror the AND case in and_var_with_comparison_1.  */
2035 /* Check for boolean identities that don't require recursive examination
2037 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2038 inner1 OR (inner1 AND inner2) => inner1
2039 !inner1 OR (inner1 OR inner2) => true
2040 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2042 if (inner1 == true_test_var)
2043 return (is_or ? var : inner1);
2044 else if (inner2 == true_test_var)
2045 return (is_or ? var : inner2);
2046 else if (inner1 == false_test_var)
2049 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2050 else if (inner2 == false_test_var)
2053 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2055 /* Next, redistribute/reassociate the OR across the inner tests.
2056 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2057 if (TREE_CODE (inner1) == SSA_NAME
2058 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2059 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2060 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2061 gimple_assign_rhs1 (s),
2062 gimple_assign_rhs2 (s),
2063 code2, op2a, op2b)))
2065 /* Handle the OR case, where we are reassociating:
2066 (inner1 OR inner2) OR (op2a code2 op2b)
2068 If the partial result t is a constant, we win. Otherwise
2069 continue on to try reassociating with the other inner test. */
2072 if (integer_onep (t))
2073 return boolean_true_node;
2074 else if (integer_zerop (t))
2078 /* Handle the AND case, where we are redistributing:
2079 (inner1 AND inner2) OR (op2a code2 op2b)
2080 => (t AND (inner2 OR (op2a code op2b))) */
2081 else if (integer_zerop (t))
2082 return boolean_false_node;
2084 /* Save partial result for later. */
2088 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2089 if (TREE_CODE (inner2) == SSA_NAME
2090 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2091 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2092 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2093 gimple_assign_rhs1 (s),
2094 gimple_assign_rhs2 (s),
2095 code2, op2a, op2b)))
2097 /* Handle the OR case, where we are reassociating:
2098 (inner1 OR inner2) OR (op2a code2 op2b)
2100 => (t OR partial) */
2103 if (integer_zerop (t))
2105 else if (integer_onep (t))
2106 return boolean_true_node;
2107 /* If both are the same, we can apply the identity
2109 else if (partial && same_bool_result_p (t, partial))
2113 /* Handle the AND case, where we are redistributing:
2114 (inner1 AND inner2) OR (op2a code2 op2b)
2115 => (t AND (inner1 OR (op2a code2 op2b)))
2116 => (t AND partial) */
2119 if (integer_zerop (t))
2120 return boolean_false_node;
2123 /* We already got a simplification for the other
2124 operand to the redistributed AND expression. The
2125 interesting case is when at least one is true.
2126 Or, if both are the same, we can apply the identity
2128 if (integer_onep (partial))
2130 else if (integer_onep (t))
2132 else if (same_bool_result_p (t, partial))
2141 /* Try to simplify the OR of two comparisons defined by
2142 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2143 If this can be done without constructing an intermediate value,
2144 return the resulting tree; otherwise NULL_TREE is returned.
2145 This function is deliberately asymmetric as it recurses on SSA_DEFs
2146 in the first comparison but not the second. */
2149 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2150 enum tree_code code2, tree op2a, tree op2b)
2152 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2153 if (operand_equal_p (op1a, op2a, 0)
2154 && operand_equal_p (op1b, op2b, 0))
2156 /* Result will be either NULL_TREE, or a combined comparison. */
2157 tree t = combine_comparisons (UNKNOWN_LOCATION,
2158 TRUTH_ORIF_EXPR, code1, code2,
2159 boolean_type_node, op1a, op1b);
2164 /* Likewise the swapped case of the above. */
2165 if (operand_equal_p (op1a, op2b, 0)
2166 && operand_equal_p (op1b, op2a, 0))
2168 /* Result will be either NULL_TREE, or a combined comparison. */
2169 tree t = combine_comparisons (UNKNOWN_LOCATION,
2170 TRUTH_ORIF_EXPR, code1,
2171 swap_tree_comparison (code2),
2172 boolean_type_node, op1a, op1b);
2177 /* If both comparisons are of the same value against constants, we might
2178 be able to merge them. */
2179 if (operand_equal_p (op1a, op2a, 0)
2180 && TREE_CODE (op1b) == INTEGER_CST
2181 && TREE_CODE (op2b) == INTEGER_CST)
2183 int cmp = tree_int_cst_compare (op1b, op2b);
2185 /* If we have (op1a != op1b), we should either be able to
2186 return that or TRUE, depending on whether the constant op1b
2187 also satisfies the other comparison against op2b. */
2188 if (code1 == NE_EXPR)
2194 case EQ_EXPR: val = (cmp == 0); break;
2195 case NE_EXPR: val = (cmp != 0); break;
2196 case LT_EXPR: val = (cmp < 0); break;
2197 case GT_EXPR: val = (cmp > 0); break;
2198 case LE_EXPR: val = (cmp <= 0); break;
2199 case GE_EXPR: val = (cmp >= 0); break;
2200 default: done = false;
2205 return boolean_true_node;
2207 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2210 /* Likewise if the second comparison is a != comparison. */
2211 else if (code2 == NE_EXPR)
2217 case EQ_EXPR: val = (cmp == 0); break;
2218 case NE_EXPR: val = (cmp != 0); break;
2219 case LT_EXPR: val = (cmp > 0); break;
2220 case GT_EXPR: val = (cmp < 0); break;
2221 case LE_EXPR: val = (cmp >= 0); break;
2222 case GE_EXPR: val = (cmp <= 0); break;
2223 default: done = false;
2228 return boolean_true_node;
2230 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2234 /* See if an equality test is redundant with the other comparison. */
2235 else if (code1 == EQ_EXPR)
2240 case EQ_EXPR: val = (cmp == 0); break;
2241 case NE_EXPR: val = (cmp != 0); break;
2242 case LT_EXPR: val = (cmp < 0); break;
2243 case GT_EXPR: val = (cmp > 0); break;
2244 case LE_EXPR: val = (cmp <= 0); break;
2245 case GE_EXPR: val = (cmp >= 0); break;
2250 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2252 else if (code2 == EQ_EXPR)
2257 case EQ_EXPR: val = (cmp == 0); break;
2258 case NE_EXPR: val = (cmp != 0); break;
2259 case LT_EXPR: val = (cmp > 0); break;
2260 case GT_EXPR: val = (cmp < 0); break;
2261 case LE_EXPR: val = (cmp >= 0); break;
2262 case GE_EXPR: val = (cmp <= 0); break;
2267 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2270 /* Choose the less restrictive of two < or <= comparisons. */
2271 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2272 && (code2 == LT_EXPR || code2 == LE_EXPR))
2274 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2275 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2277 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2280 /* Likewise choose the less restrictive of two > or >= comparisons. */
2281 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2282 && (code2 == GT_EXPR || code2 == GE_EXPR))
2284 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2285 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2287 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2290 /* Check for singleton ranges. */
2292 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2293 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2294 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2296 /* Check for less/greater pairs that don't restrict the range at all. */
2298 && (code1 == LT_EXPR || code1 == LE_EXPR)
2299 && (code2 == GT_EXPR || code2 == GE_EXPR))
2300 return boolean_true_node;
2302 && (code1 == GT_EXPR || code1 == GE_EXPR)
2303 && (code2 == LT_EXPR || code2 == LE_EXPR))
2304 return boolean_true_node;
2307 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2308 NAME's definition is a truth value. See if there are any simplifications
2309 that can be done against the NAME's definition. */
2310 if (TREE_CODE (op1a) == SSA_NAME
2311 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2312 && (integer_zerop (op1b) || integer_onep (op1b)))
2314 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2315 || (code1 == NE_EXPR && integer_onep (op1b)));
2316 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2317 switch (gimple_code (stmt))
2320 /* Try to simplify by copy-propagating the definition. */
2321 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2324 /* If every argument to the PHI produces the same result when
2325 ORed with the second comparison, we win.
2326 Do not do this unless the type is bool since we need a bool
2327 result here anyway. */
2328 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2330 tree result = NULL_TREE;
2332 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2334 tree arg = gimple_phi_arg_def (stmt, i);
2336 /* If this PHI has itself as an argument, ignore it.
2337 If all the other args produce the same result,
2339 if (arg == gimple_phi_result (stmt))
2341 else if (TREE_CODE (arg) == INTEGER_CST)
2343 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2346 result = boolean_true_node;
2347 else if (!integer_onep (result))
2351 result = fold_build2 (code2, boolean_type_node,
2353 else if (!same_bool_comparison_p (result,
2357 else if (TREE_CODE (arg) == SSA_NAME
2358 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2361 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2362 /* In simple cases we can look through PHI nodes,
2363 but we have to be careful with loops.
2365 if (! dom_info_available_p (CDI_DOMINATORS)
2366 || gimple_bb (def_stmt) == gimple_bb (stmt)
2367 || dominated_by_p (CDI_DOMINATORS,
2368 gimple_bb (def_stmt),
2371 temp = or_var_with_comparison (arg, invert, code2,
2377 else if (!same_bool_result_p (result, temp))
2393 /* Try to simplify the OR of two comparisons, specified by
2394 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2395 If this can be simplified to a single expression (without requiring
2396 introducing more SSA variables to hold intermediate values),
2397 return the resulting tree. Otherwise return NULL_TREE.
2398 If the result expression is non-null, it has boolean type. */
2401 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2402 enum tree_code code2, tree op2a, tree op2b)
2404 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  /* or_comparisons_1 only recurses on SSA defs of the first comparison,
     so retry with the two comparisons swapped before giving up.  */
2408 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2412 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2414 Either NULL_TREE, a simplified but non-constant or a constant
2417 ??? This should go into a gimple-fold-inline.h file to be eventually
2418 privatized with the single valueize function used in the various TUs
2419 to avoid the indirect function call overhead. */
/* NOTE(review): this chunk is a lossy extract -- interior lines of the
   original function (returns, braces, some case labels) are elided, so
   the text below is not self-contained C.  Comments describe only what
   the visible lines establish.  */
2422 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2424 location_t loc = gimple_location (stmt);
/* Dispatch on the statement kind; the visible cases handle assignments
   (by RHS class) and calls to built-in functions.  */
2425 switch (gimple_code (stmt))
2429 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2431 switch (get_gimple_rhs_class (subcode))
2433 case GIMPLE_SINGLE_RHS:
2435 tree rhs = gimple_assign_rhs1 (stmt);
2436 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
/* SSA names are simply replaced by their lattice value.  */
2438 if (TREE_CODE (rhs) == SSA_NAME)
2440 /* If the RHS is an SSA_NAME, return its known constant value,
2442 return (*valueize) (rhs);
2444 /* Handle propagating invariant addresses into address
2446 else if (TREE_CODE (rhs) == ADDR_EXPR
2447 && !is_gimple_min_invariant (rhs))
2449 HOST_WIDE_INT offset;
2451 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2455 && (CONSTANT_CLASS_P (base)
2456 || decl_address_invariant_p (base)))
2457 return build_invariant_address (TREE_TYPE (rhs),
/* A vector CONSTRUCTOR with one element per subpart folds to a vector
   constant when each valueized element is an INTEGER/REAL/FIXED_CST.  */
2460 else if (TREE_CODE (rhs) == CONSTRUCTOR
2461 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2462 && (CONSTRUCTOR_NELTS (rhs)
2463 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2469 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2471 val = (*valueize) (val);
2472 if (TREE_CODE (val) == INTEGER_CST
2473 || TREE_CODE (val) == REAL_CST
2474 || TREE_CODE (val) == FIXED_CST)
2475 list = tree_cons (NULL_TREE, val, list);
/* Elements were consed in reverse; restore order for build_vector.  */
2480 return build_vector (TREE_TYPE (rhs), nreverse (list));
/* Memory references: fold wrappers around SSA names directly, and fall
   back to constant-initializer lookup for the rest.  */
2483 if (kind == tcc_reference)
2485 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2486 || TREE_CODE (rhs) == REALPART_EXPR
2487 || TREE_CODE (rhs) == IMAGPART_EXPR)
2488 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2490 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2491 return fold_unary_loc (EXPR_LOCATION (rhs),
2493 TREE_TYPE (rhs), val);
2495 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2496 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2498 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2499 return fold_ternary_loc (EXPR_LOCATION (rhs),
2501 TREE_TYPE (rhs), val,
2502 TREE_OPERAND (rhs, 1),
2503 TREE_OPERAND (rhs, 2));
2505 else if (TREE_CODE (rhs) == MEM_REF
2506 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2508 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2509 if (TREE_CODE (val) == ADDR_EXPR
2510 && is_gimple_min_invariant (val))
2512 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2514 TREE_OPERAND (rhs, 1));
2519 return fold_const_aggregate_ref_1 (rhs, valueize);
2521 else if (kind == tcc_declaration)
2522 return get_symbol_constant_value (rhs);
2526 case GIMPLE_UNARY_RHS:
2528 /* Handle unary operators that can appear in GIMPLE form.
2529 Note that we know the single operand must be a constant,
2530 so this should almost always return a simplified RHS. */
2531 tree lhs = gimple_assign_lhs (stmt);
2532 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2534 /* Conversions are useless for CCP purposes if they are
2535 value-preserving. Thus the restrictions that
2536 useless_type_conversion_p places for restrict qualification
2537 of pointer types should not apply here.
2538 Substitution later will only substitute to allowed places. */
2539 if (CONVERT_EXPR_CODE_P (subcode)
2540 && POINTER_TYPE_P (TREE_TYPE (lhs))
2541 && POINTER_TYPE_P (TREE_TYPE (op0))
2542 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2543 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2544 && TYPE_MODE (TREE_TYPE (lhs))
2545 == TYPE_MODE (TREE_TYPE (op0)))
2549 fold_unary_ignore_overflow_loc (loc, subcode,
2550 gimple_expr_type (stmt), op0);
2553 case GIMPLE_BINARY_RHS:
2555 /* Handle binary operators that can appear in GIMPLE form. */
2556 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2557 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2559 /* Translate &x + CST into an invariant form suitable for
2560 further propagation. */
2561 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2562 && TREE_CODE (op0) == ADDR_EXPR
2563 && TREE_CODE (op1) == INTEGER_CST)
2565 tree off = fold_convert (ptr_type_node, op1);
2566 return build_fold_addr_expr_loc
2568 fold_build2 (MEM_REF,
2569 TREE_TYPE (TREE_TYPE (op0)),
2570 unshare_expr (op0), off));
2573 return fold_binary_loc (loc, subcode,
2574 gimple_expr_type (stmt), op0, op1);
2577 case GIMPLE_TERNARY_RHS:
2579 /* Handle ternary operators that can appear in GIMPLE form. */
2580 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2581 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2582 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2584 /* Fold embedded expressions in ternary codes. */
2585 if ((subcode == COND_EXPR
2586 || subcode == VEC_COND_EXPR)
2587 && COMPARISON_CLASS_P (op0))
2589 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2590 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2591 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2592 TREE_TYPE (op0), op00, op01);
2597 return fold_ternary_loc (loc, subcode,
2598 gimple_expr_type (stmt), op0, op1, op2);
/* Call statements: only direct calls to built-in functions are folded;
   internal calls are explicitly left alone.  */
2610 if (gimple_call_internal_p (stmt))
2611 /* No folding yet for these functions. */
2614 fn = (*valueize) (gimple_call_fn (stmt));
2615 if (TREE_CODE (fn) == ADDR_EXPR
2616 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2617 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
/* Rebuild the call as a tree with valueized arguments and let the
   generic builtin folder have a go at it.  */
2619 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2622 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2623 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2624 call = build_call_array_loc (loc,
2625 gimple_call_return_type (stmt),
2626 fn, gimple_call_num_args (stmt), args);
2627 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2629 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2630 STRIP_NOPS (retval);
2641 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2642 Returns NULL_TREE if folding to a constant is not possible, otherwise
2643 returns a constant according to is_gimple_min_invariant. */
2646 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2648 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2649 if (res && is_gimple_min_invariant (res))
2655 /* The following set of functions are supposed to fold references using
2656 their constant initializers. */
2658 static tree fold_ctor_reference (tree type, tree ctor,
2659 unsigned HOST_WIDE_INT offset,
2660 unsigned HOST_WIDE_INT size);
2662 /* See if we can find constructor defining value of BASE.
2663 When we know the consructor with constant offset (such as
2664 base is array[40] and we do know constructor of array), then
2665 BIT_OFFSET is adjusted accordingly.
2667 As a special case, return error_mark_node when constructor
2668 is not explicitly available, but it is known to be zero
2669 such as 'static const int a;'. */
/* NOTE(review): lossy extract -- interior lines (return type, some
   returns, case labels, braces) are elided; comments below describe
   only the visible logic.  */
2671 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2672 tree (*valueize)(tree))
2674 HOST_WIDE_INT bit_offset2, size, max_size;
/* For a MEM_REF, fold its constant byte offset into *BIT_OFFSET and
   strip to the underlying decl via the (valueized) address operand.  */
2675 if (TREE_CODE (base) == MEM_REF)
2677 if (!integer_zerop (TREE_OPERAND (base, 1)))
2679 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2681 *bit_offset += (mem_ref_offset (base).low
2686 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2687 base = valueize (TREE_OPERAND (base, 0));
2688 if (!base || TREE_CODE (base) != ADDR_EXPR)
2690 base = TREE_OPERAND (base, 0);
2693 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2694 DECL_INITIAL. If BASE is a nested reference into another
2695 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2696 the inner reference. */
2697 switch (TREE_CODE (base))
2700 if (!const_value_known_p (base))
/* Static/external decl with no initializer: known zero-initialized,
   signalled to callers with error_mark_node (see header comment).  */
2705 if (!DECL_INITIAL (base)
2706 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2707 return error_mark_node;
2708 /* Do not return an error_mark_node DECL_INITIAL. LTO uses this
2709 as special marker (_not_ zero ...) for its own purposes. */
2710 if (DECL_INITIAL (base) == error_mark_node)
2712 return DECL_INITIAL (base);
/* Nested reference: peel one level, accumulate the constant bit
   offset, and recurse on the inner base.  */
2716 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2717 if (max_size == -1 || size != max_size)
2719 *bit_offset += bit_offset2;
2720 return get_base_constructor (base, bit_offset, valueize);
2731 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2732 to the memory at bit OFFSET.
2734 We do only simple job of folding byte accesses. */
/* NOTE(review): lossy extract -- some interior lines are elided.
   Only single-byte, byte-aligned integral accesses whose mode matches
   the string's element mode are folded; everything else presumably
   falls through to a NULL return in the elided tail.  */
2737 fold_string_cst_ctor_reference (tree type, tree ctor,
2738 unsigned HOST_WIDE_INT offset,
2739 unsigned HOST_WIDE_INT size)
2741 if (INTEGRAL_TYPE_P (type)
2742 && (TYPE_MODE (type)
2743 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2744 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2746 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2747 && size == BITS_PER_UNIT
2748 && !(offset % BITS_PER_UNIT))
/* Convert the bit offset to a byte index into the string.  */
2750 offset /= BITS_PER_UNIT;
2751 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2752 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2755 const char a[20]="hello";
2758 might lead to offset greater than string length. In this case we
2759 know value is either initialized to 0 or out of bounds. Return 0
2761 return build_zero_cst (type);
2766 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2767 SIZE to the memory at bit OFFSET. */
/* NOTE(review): lossy extract -- interior lines (return type, some
   returns, else keywords, braces) are elided; comments describe only
   the visible logic.  */
2770 fold_array_ctor_reference (tree type, tree ctor,
2771 unsigned HOST_WIDE_INT offset,
2772 unsigned HOST_WIDE_INT size)
2774 unsigned HOST_WIDE_INT cnt;
2776 double_int low_bound, elt_size;
2777 double_int index, max_index;
2778 double_int access_index;
2779 tree domain_type = NULL_TREE;
2780 HOST_WIDE_INT inner_offset;
2782 /* Compute low bound and elt size. */
2783 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2784 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2785 if (domain_type && TYPE_MIN_VALUE (domain_type))
2787 /* Static constructors for variably sized objects makes no sense. */
2788 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2789 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2792 low_bound = double_int_zero;
2793 /* Static constructors for variably sized objects makes no sense. */
2794 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2797 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2800 /* We can handle only constantly sized accesses that are known to not
2801 be larger than size of array element. */
2802 if (!TYPE_SIZE_UNIT (type)
2803 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2804 || double_int_cmp (elt_size,
2805 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
2808 /* Compute the array index we look for. */
2809 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
2810 elt_size, TRUNC_DIV_EXPR)
2811 access_index = double_int_add (access_index, low_bound);
2813 /* And offset within the access. */
2814 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
2816 /* See if the array field is large enough to span whole access. We do not
2817 care to fold accesses spanning multiple array indexes. */
2818 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
/* Walk the constructor elements looking for the one covering
   ACCESS_INDEX.  INDEX starts one below LOW_BOUND so that an element
   with no explicit index becomes "previous index + 1".  */
2821 index = double_int_sub (low_bound, double_int_one);
2822 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2824 /* Array constructor might explicitely set index, or specify range
2825 or leave index NULL meaning that it is next index after previous
2829 if (TREE_CODE (cfield) == INTEGER_CST)
2830 max_index = index = tree_to_double_int (cfield);
2833 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2834 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2835 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2839 max_index = index = double_int_add (index, double_int_one);
2841 /* Do we have match? */
2842 if (double_int_cmp (access_index, index, 1) >= 0
2843 && (double_int_cmp (access_index, max_index, 1) <= 0)
2844 return fold_ctor_reference (type, cval, inner_offset, size);
2846 /* When memory is not explicitely mentioned in constructor,
2847 it is 0 (or out of range). */
2848 return build_zero_cst (type);
2851 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2852 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
/* NOTE(review): lossy extract -- interior lines (return type, some
   returns, braces) are elided; comments describe only the visible
   logic.  */
2855 fold_nonarray_ctor_reference (tree type, tree ctor,
2856 unsigned HOST_WIDE_INT offset,
2857 unsigned HOST_WIDE_INT size)
2859 unsigned HOST_WIDE_INT cnt;
/* Walk the fields of the constructor looking for one whose bit range
   overlaps [OFFSET, OFFSET+SIZE).  */
2862 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2865 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2866 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2867 tree field_size = DECL_SIZE (cfield);
2868 double_int bitoffset;
2869 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2870 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
2871 double_int bitoffset_end, access_end;
2873 /* Variable sized objects in static constructors makes no sense,
2874 but field_size can be NULL for flexible array members. */
2875 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2876 && TREE_CODE (byte_offset) == INTEGER_CST
2877 && (field_size != NULL_TREE
2878 ? TREE_CODE (field_size) == INTEGER_CST
2879 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2881 /* Compute bit offset of the field. */
2882 bitoffset = double_int_add (tree_to_double_int (field_offset),
2883 double_int_mul (byte_offset_cst,
2884 bits_per_unit_cst));
2885 /* Compute bit offset where the field ends. */
2886 if (field_size != NULL_TREE)
2887 bitoffset_end = double_int_add (bitoffset,
2888 tree_to_double_int (field_size));
/* A flexible array member has no size; its end offset is left as
   zero and the overlap test below skips the end comparison.  */
2890 bitoffset_end = double_int_zero;
2892 access_end = double_int_add (uhwi_to_double_int (offset),
2893 uhwi_to_double_int (size));
2895 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2896 [BITOFFSET, BITOFFSET_END)? */
2897 if (double_int_cmp (access_end, bitoffset, 0) > 0
2898 && (field_size == NULL_TREE
2899 || double_int_cmp (uhwi_to_double_int (offset),
2900 bitoffset_end, 0) < 0))
2902 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
2904 /* We do have overlap. Now see if field is large enough to
2905 cover the access. Give up for accesses spanning multiple
2907 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
2909 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
/* Recurse into the field's value with the offset rebased to the
   start of the field.  */
2911 return fold_ctor_reference (type, cval,
2912 double_int_to_uhwi (inner_offset), size);
2915 /* When memory is not explicitely mentioned in constructor, it is 0. */
2916 return build_zero_cst (type);
2919 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2920 to the memory at bit OFFSET. */
/* NOTE(review): lossy extract -- interior lines (return type, some
   conditions, braces) are elided; comments describe only the visible
   logic.  */
2923 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2924 unsigned HOST_WIDE_INT size)
2928 /* We found the field with exact match. */
2929 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2931 return canonicalize_constructor_val (ctor);
2933 /* We are at the end of walk, see if we can view convert the
2935 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2936 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2937 && operand_equal_p (TYPE_SIZE (type),
2938 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2940 ret = canonicalize_constructor_val (ctor);
2941 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
/* Dispatch by the kind of initializer: STRING_CST, array/vector
   CONSTRUCTOR, or any other aggregate CONSTRUCTOR.  */
2946 if (TREE_CODE (ctor) == STRING_CST)
2947 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2948 if (TREE_CODE (ctor) == CONSTRUCTOR)
2951 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2952 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2953 return fold_array_ctor_reference (type, ctor, offset, size);
2955 return fold_nonarray_ctor_reference (type, ctor, offset, size);
2961 /* Return the tree representing the element referenced by T if T is an
2962 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
2963 names using VALUEIZE. Return NULL_TREE otherwise. */
/* NOTE(review): lossy extract -- interior lines (return type, some
   case labels, returns, braces) are elided; comments describe only the
   visible logic.  */
2966 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2968 tree ctor, idx, base;
2969 HOST_WIDE_INT offset, size, max_size;
/* Volatile references must not be constant-folded away.  */
2972 if (TREE_THIS_VOLATILE (t))
2975 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2976 return get_symbol_constant_value (t);
2978 tem = fold_read_from_constant_string (t);
2982 switch (TREE_CODE (t))
2985 case ARRAY_RANGE_REF:
2986 /* Constant indexes are handled well by get_base_constructor.
2987 Only special case variable offsets.
2988 FIXME: This code can't handle nested references with variable indexes
2989 (they will be handled only by iteration of ccp). Perhaps we can bring
2990 get_ref_base_and_extent here and make it use a valueize callback. */
2991 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
2993 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
2994 && host_integerp (idx, 0))
2996 tree low_bound, unit_size;
2998 /* If the resulting bit-offset is constant, track it. */
2999 if ((low_bound = array_ref_low_bound (t),
3000 host_integerp (low_bound, 0))
3001 && (unit_size = array_ref_element_size (t),
3002 host_integerp (unit_size, 1)))
/* Bit offset of the indexed element:
   (idx - low_bound) * unit_size * BITS_PER_UNIT.  */
3004 offset = TREE_INT_CST_LOW (idx);
3005 offset -= TREE_INT_CST_LOW (low_bound);
3006 offset *= TREE_INT_CST_LOW (unit_size);
3007 offset *= BITS_PER_UNIT;
3009 base = TREE_OPERAND (t, 0);
3010 ctor = get_base_constructor (base, &offset, valueize);
3011 /* Empty constructor. Always fold to 0. */
3012 if (ctor == error_mark_node)
3013 return build_zero_cst (TREE_TYPE (t));
3014 /* Out of bound array access. Value is undefined,
3018 /* We can not determine ctor. */
3021 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3022 TREE_INT_CST_LOW (unit_size)
3030 case TARGET_MEM_REF:
/* Generic reference: resolve base and extent, then look the value up
   in the base's constant initializer.  */
3032 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3033 ctor = get_base_constructor (base, &offset, valueize);
3035 /* Empty constructor. Always fold to 0. */
3036 if (ctor == error_mark_node)
3037 return build_zero_cst (TREE_TYPE (t));
3038 /* We do not know precise address. */
3039 if (max_size == -1 || max_size != size)
3041 /* We can not determine ctor. */
3045 /* Out of bound array access. Value is undefined, but don't fold. */
3049 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
/* REALPART/IMAGPART of a constant complex value fold by recursing on
   the operand.  */
3054 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3055 if (c && TREE_CODE (c) == COMPLEX_CST)
3056 return fold_build1_loc (EXPR_LOCATION (t),
3057 TREE_CODE (t), TREE_TYPE (t), c);
3069 fold_const_aggregate_ref (tree t)
3071 return fold_const_aggregate_ref_1 (t, NULL);
3074 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3075 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3076 KNOWN_BINFO carries the binfo describing the true type of
3077 OBJ_TYPE_REF_OBJECT(REF). */
/* NOTE(review): lossy extract -- interior lines (return type, some
   returns, braces) are elided; comments describe only the visible
   logic.  */
3080 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3082 unsigned HOST_WIDE_INT offset, size;
3085 v = BINFO_VTABLE (known_binfo);
3086 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
/* The vtable reference may be &vtable or &vtable + constant-offset;
   strip the POINTER_PLUS_EXPR and accumulate the offset in bits.  */
3090 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3092 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3093 v = TREE_OPERAND (v, 0);
3098 if (TREE_CODE (v) != ADDR_EXPR)
3100 v = TREE_OPERAND (v, 0);
/* Only fold when the vtable decl and its initializer are available.  */
3102 if (TREE_CODE (v) != VAR_DECL
3103 || !DECL_VIRTUAL_P (v)
3104 || !DECL_INITIAL (v)
3105 || DECL_INITIAL (v) == error_mark_node)
3107 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE)
3108 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
/* Index into the vtable: TOKEN slots of SIZE bits past OFFSET.  */
3109 offset += token * size;
3110 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
3112 if (!fn || integer_zerop (fn))
3114 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3115 || TREE_CODE (fn) == FDESC_EXPR);
3116 fn = TREE_OPERAND (fn, 0);
3117 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3119 /* When cgraph node is missing and function is not public, we cannot
3120 devirtualize. This can happen in WHOPR when the actual method
3121 ends up in other partition, because we found devirtualization
3122 possibility too late. */
3123 if (!can_refer_decl_in_current_unit_p (fn))
3129 /* Return true iff VAL is a gimple expression that is known to be
3130 non-negative. Restricted to floating-point inputs. */
3133 gimple_val_nonnegative_real_p (tree val)
3137 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3139 /* Use existing logic for non-gimple trees. */
3140 if (tree_expr_nonnegative_p (val))
3143 if (TREE_CODE (val) != SSA_NAME)
3146 /* Currently we look only at the immediately defining statement
3147 to make this determination, since recursion on defining
3148 statements of operands can lead to quadratic behavior in the
3149 worst case. This is expected to catch almost all occurrences
3150 in practice. It would be possible to implement limited-depth
3151 recursion if important cases are lost. Alternatively, passes
3152 that need this information (such as the pow/powi lowering code
3153 in the cse_sincos pass) could be revised to provide it through
3154 dataflow propagation. */
3156 def_stmt = SSA_NAME_DEF_STMT (val);
3158 if (is_gimple_assign (def_stmt))
3162 /* See fold-const.c:tree_expr_nonnegative_p for additional
3163 cases that could be handled with recursion. */
3165 switch (gimple_assign_rhs_code (def_stmt))
3168 /* Always true for floating-point operands. */
3172 /* True if the two operands are identical (since we are
3173 restricted to floating-point inputs). */
3174 op0 = gimple_assign_rhs1 (def_stmt);
3175 op1 = gimple_assign_rhs2 (def_stmt);
3178 || operand_equal_p (op0, op1, 0))
3185 else if (is_gimple_call (def_stmt))
3187 tree fndecl = gimple_call_fndecl (def_stmt);
3189 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3193 switch (DECL_FUNCTION_CODE (fndecl))
3195 CASE_FLT_FN (BUILT_IN_ACOS):
3196 CASE_FLT_FN (BUILT_IN_ACOSH):
3197 CASE_FLT_FN (BUILT_IN_CABS):
3198 CASE_FLT_FN (BUILT_IN_COSH):
3199 CASE_FLT_FN (BUILT_IN_ERFC):
3200 CASE_FLT_FN (BUILT_IN_EXP):
3201 CASE_FLT_FN (BUILT_IN_EXP10):
3202 CASE_FLT_FN (BUILT_IN_EXP2):
3203 CASE_FLT_FN (BUILT_IN_FABS):
3204 CASE_FLT_FN (BUILT_IN_FDIM):
3205 CASE_FLT_FN (BUILT_IN_HYPOT):
3206 CASE_FLT_FN (BUILT_IN_POW10):
3209 CASE_FLT_FN (BUILT_IN_SQRT):
3210 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3211 nonnegative inputs. */
3212 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3217 CASE_FLT_FN (BUILT_IN_POWI):
3218 /* True if the second argument is an even integer. */
3219 arg1 = gimple_call_arg (def_stmt, 1);
3221 if (TREE_CODE (arg1) == INTEGER_CST
3222 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3227 CASE_FLT_FN (BUILT_IN_POW):
3228 /* True if the second argument is an even integer-valued
3230 arg1 = gimple_call_arg (def_stmt, 1);
3232 if (TREE_CODE (arg1) == REAL_CST)
3237 c = TREE_REAL_CST (arg1);
3238 n = real_to_integer (&c);
3242 REAL_VALUE_TYPE cint;
3243 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3244 if (real_identical (&c, &cint))