1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
33 #include "gimple-fold.h"
/* Return true when DECL can be referenced from current unit.
   We can get declarations that are not possible to reference for
   various reasons:

   1) When analyzing C++ virtual tables.
      C++ virtual tables do have known constructors even
      when they are keyed to other compilation unit.
      Those tables can contain pointers to methods and vars
      in other units.  Those methods have both STATIC and EXTERNAL
      set.
   2) In WHOPR mode devirtualization might lead to reference
      to method that was partitioned elsewhere.
      In this case we have static VAR_DECL or FUNCTION_DECL
      that has no corresponding callgraph/varpool node
      defined locally.
   3) COMDAT functions referred by external vtables that
      we devirtualize only during final compilation stage.
      At this time we already decided that we will not output
      the function body and thus we can't reference the symbol
      directly.  */

can_refer_decl_in_current_unit_p (tree decl)
  struct varpool_node *vnode;
  struct cgraph_node *node;

  /* A symbol that is neither static nor external is local to this
     unit and therefore always referable.  */
  if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
  /* External flag is set, so we deal with C++ reference
     to static object from other file.  */
  if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
      && TREE_CODE (decl) == VAR_DECL)
  /* Just be sure it is not big in frontend setting
     flags incorrectly.  Those variables should never
     be finalized in this unit.  */
  gcc_checking_assert (!(vnode = varpool_get_node (decl))
                       || !vnode->finalized)
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     output eventually.

     ??? as observed in PR20991 for already optimized out comdat virtual functions
     we may not necessarily give up because the copy will be output elsewhere when
     corresponding vtable is output.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
  /* If we already output the function body, we are safe.  */
  if (TREE_ASM_WRITTEN (decl))
  if (TREE_CODE (decl) == FUNCTION_DECL)
      node = cgraph_get_node (decl);
      /* Check that we still have function body and that we didn't took
         the decision to eliminate offline copy of the function yet.
         The second is important when devirtualization happens during final
         compilation stage when making a new reference no longer makes callee
         to be compiled.  */
      if (!node || !node->analyzed || node->global.inlined_to)
  else if (TREE_CODE (decl) == VAR_DECL)
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->finalized)
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.  */

canonicalize_constructor_val (tree cval)
  /* Rewrite PTR p+ CST with invariant PTR into &MEM_REF[PTR, CST],
     which is the canonical invariant form.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
  if (TREE_CODE (cval) == ADDR_EXPR)
      tree base = get_base_address (TREE_OPERAND (cval, 0));

      /* Refuse addresses of decls that cannot be referenced from the
         current unit (see can_refer_decl_in_current_unit_p).  */
      && (TREE_CODE (base) == VAR_DECL
          || TREE_CODE (base) == FUNCTION_DECL)
      && !can_refer_decl_in_current_unit_p (base))
      /* Make sure a variable whose address we take is known to the
         current function.  */
      if (cfun && base && TREE_CODE (base) == VAR_DECL)
        add_referenced_var (base);
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

get_symbol_constant_value (tree sym)
  if (const_value_known_p (sym))
      tree val = DECL_INITIAL (sym);
          /* Put the initializer into invariant form before testing it.  */
          val = canonicalize_constructor_val (val);
          if (val && is_gimple_min_invariant (val))
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return build_zero_cst (TREE_TYPE (sym));
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

maybe_fold_reference (tree expr, bool is_lhs)
  /* Unary wrappers around a constant operand fold directly.  */
  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  /* Strip the handled-component chain down to the base reference.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
                              TREE_OPERAND (*t, 0),
                              TREE_OPERAND (*t, 1));
          /* Folding may drop the volatile qualification; restore it.  */
          TREE_THIS_VOLATILE (tem) = volatile_p;
          /* Retry folding on the canonicalized reference.  */
          tem = maybe_fold_reference (expr, is_lhs);

      /* Try folding the whole reference to a constant aggregate value.  */
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
          == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
          == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
         from the rhs to the lhs if is_lhs, but we don't have the
         rhs here to verify that.  Thus require strict type
         compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
                             TREE_TYPE (TREE_OPERAND
                                          (TREE_OPERAND (*t, 0), 0))))
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
  /* Canonicalize TARGET_MEM_REFs the same way.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
      tree tem = maybe_fold_tmr (*t);
          tem = maybe_fold_reference (expr, is_lhs);
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

fold_gimple_assign (gimple_stmt_iterator *si)
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  /* Dispatch on the shape of the right-hand side.  */
  switch (get_gimple_rhs_class (subcode))
    case GIMPLE_SINGLE_RHS:
        tree rhs = gimple_assign_rhs1 (stmt);

        /* Try to fold a conditional expression.  */
        if (TREE_CODE (rhs) == COND_EXPR)
            tree op0 = COND_EXPR_COND (rhs);
            location_t cond_loc = EXPR_LOCATION (rhs);

            if (COMPARISON_CLASS_P (op0))
                /* Defer overflow warnings until we know whether the
                   folded condition is actually usable.  */
                fold_defer_overflow_warnings ();
                tem = fold_binary_loc (cond_loc,
                                       TREE_CODE (op0), TREE_TYPE (op0),
                                       TREE_OPERAND (op0, 0),
                                       TREE_OPERAND (op0, 1));
                /* This is actually a conditional expression, not a GIMPLE
                   conditional statement, however, the valid_gimple_rhs_p
                   test still applies.  */
                set = (tem && is_gimple_condexpr (tem)
                       && valid_gimple_rhs_p (tem));
                fold_undefer_overflow_warnings (set, stmt, 0);
            else if (is_gimple_min_invariant (op0))

            /* Rebuild the COND_EXPR around the simplified condition.  */
            result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
                                      COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));

        else if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == ADDR_EXPR)
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
                /* &MEM_REF[p, 0] simplifies to p itself.  */
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                 && (CONSTRUCTOR_NELTS (rhs)
                     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  All
               elements must be scalar constants.  */
            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (TREE_CODE (val) != INTEGER_CST
                  && TREE_CODE (val) != REAL_CST
                  && TREE_CODE (val) != FIXED_CST)

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));

        else if (DECL_P (rhs))
          return unshare_expr (get_symbol_constant_value (rhs));

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))

    case GIMPLE_UNARY_RHS:
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
         X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
          || gimple_assign_rhs_code (stmt) == NE_EXPR)
          tree lhs = gimple_assign_lhs (stmt);
          tree op1 = gimple_assign_rhs1 (stmt);
          tree op2 = gimple_assign_rhs2 (stmt);
          tree type = TREE_TYPE (op1);

          /* Check whether the comparison operands are of the same boolean
             type as the result type is.
             Check that second operand is an integer-constant with value
             zero or one.  */
          if (TREE_CODE (op2) == INTEGER_CST
              && (integer_zerop (op2) || integer_onep (op2))
              && useless_type_conversion_p (TREE_TYPE (lhs), type))
              enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
              bool is_logical_not = false;

              /* X == 0 and X != 1 is a logical-not of X
                 X == 1 and X != 0 is X  */
              if ((cmp_code == EQ_EXPR && integer_zerop (op2))
                  || (cmp_code == NE_EXPR && integer_onep (op2)))
                is_logical_not = true;

              if (is_logical_not == false)
              /* Only for one-bit precision typed X the transformation
                 !X -> ~X is valid.  */
              else if (TYPE_PRECISION (type) == 1)
                result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
              /* Otherwise we use !X -> X ^ 1.  */
                result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
                                     type, op1, build_int_cst (type, 1));

        /* Generic binary folding.  */
        result = fold_binary_loc (loc, subcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));

          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
                                 TREE_TYPE (gimple_assign_lhs (stmt)),
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 gimple_assign_rhs3 (stmt));

          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))

    case GIMPLE_INVALID_RHS:
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

fold_gimple_cond (gimple stmt)
  tree result = fold_binary_loc (gimple_location (stmt),
                                 gimple_cond_code (stmt),
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));

      STRIP_USELESS_TYPE_CONVERSION (result);
      /* Only install the folded form if it is still a valid gimple
         condition.  */
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
          gimple_cond_set_condition_from_tree (stmt, result);
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
  tree tmp = NULL_TREE; /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc();
  struct gimplify_ctx gctx;

  /* Last store seen while walking the gimplified sequence; used to
     thread the virtual operand chain.  */
  gimple laststore = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);
  reaching_vuse = gimple_vuse (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
      /* Call result is unused; gimplify EXPR only for its side
         effects.  */
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
              unlink_stmt_vdef (stmt);
          gsi_remove (si_p, true);
      /* Gimplify EXPR into a temporary that will replace the call lhs.  */
      tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
        gsi_insert_before (si_p, last, GSI_NEW_STMT);
      new_stmt = gsi_stmt (i);
      if (gimple_in_ssa_p (cfun))
          find_new_referenced_vars (new_stmt);
          mark_symbols_for_renaming (new_stmt);
      /* If the new statement has a VUSE, update it with exact SSA name we
         know will reach this one.  */
      if (gimple_vuse (new_stmt))
          /* If we've also seen a previous store create a new VDEF for
             the latter one, and make that the new reaching VUSE.  */
              reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
              gimple_set_vdef (laststore, reaching_vuse);
              update_stmt (laststore);
          gimple_set_vuse (new_stmt, reaching_vuse);
          gimple_set_modified (new_stmt, true);
      /* Remember the last store of the sequence.  */
      if (gimple_assign_single_p (new_stmt)
          && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          laststore = new_stmt;

  if (lhs == NULL_TREE)
      /* If we replace a call without LHS that has a VDEF and our new
         sequence ends with a store we must make that store have the same
         vdef in order not to break the sequencing.  This can happen
         for instance when folding memcpy calls into assignments.  */
      if (gimple_vdef (stmt) && laststore)
          gimple_set_vdef (laststore, gimple_vdef (stmt));
          if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
            SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
          update_stmt (laststore);
      else if (gimple_in_ssa_p (cfun))
          unlink_stmt_vdef (stmt);
        gsi_insert_before (si_p, last, GSI_NEW_STMT);
      /* The lhs is an SSA register: the last store takes over the
         call's VDEF directly.  */
      if (laststore && is_gimple_reg (lhs))
          gimple_set_vdef (laststore, gimple_vdef (stmt));
          update_stmt (laststore);
          if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
            SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
          /* Otherwise give the last store a fresh VDEF reaching the
             final assignment.  */
          reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
          gimple_set_vdef (laststore, reaching_vuse);
          update_stmt (laststore);
      /* Replace the call with an assignment of the gimplified value.  */
      new_stmt = gimple_build_assign (lhs, tmp);
      if (!is_gimple_reg (tmp))
        gimple_set_vuse (new_stmt, reaching_vuse);
      if (!is_gimple_reg (lhs))
          gimple_set_vdef (new_stmt, gimple_vdef (stmt));
          if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
            SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
      else if (reaching_vuse == gimple_vuse (stmt))
        unlink_stmt_vdef (stmt);

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
  if (TREE_CODE (arg) != SSA_NAME)
      /* For a COND_EXPR both arms must yield a usable length.  */
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
               && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);

          /* A usable maximum value must be a non-negative constant.  */
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)

          val = c_strlen (arg, 1);

          if (TREE_CODE (*length) != INTEGER_CST
              || TREE_CODE (val) != INTEGER_CST)

          /* Keep the larger of the two lengths.  */
          if (tree_int_cst_lt (*length, val))
      else if (simple_cst_equal (val, *length) != 1)

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))

  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_maxval_strlen (rhs, length, visited, type);

      /* All the arguments of the PHI node must have the same constant
         length.  */
      for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          tree arg = gimple_phi_arg (def_stmt, i)->def;

          /* If this PHI has itself as an argument, we cannot
             determine the string length of this argument.  However,
             if we can find a constant string length for the other
             PHI args then we can still be sure that this is a
             constant string length.  So be optimistic and just
             continue with the next argument.  */
          if (arg == gimple_phi_result (def_stmt))

          if (!get_maxval_strlen (arg, length, visited, type))
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

gimple_fold_builtin (gimple stmt)
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  /* Whether the call's result is unused.  */
  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)

  /* If the builtin could not be folded, and it has no argument list,
     there is nothing more to do.  */
  nargs = gimple_call_num_args (stmt);

  /* Limit the work only for builtins we know how to simplify.  For
     each one, determine which argument carries the length/value to
     query via get_maxval_strlen.  */
  switch (DECL_FUNCTION_CODE (callee))
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS_UNLOCKED:
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:

  if (arg_idx >= nargs)

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  /* Second dispatch: perform the per-builtin simplification using the
     gathered value.  */
  switch (DECL_FUNCTION_CODE (callee))
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
          fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new_val)
              || (CONVERT_EXPR_P (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (loc, callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (loc, callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),

        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, false, val[0]);

    case BUILT_IN_FPUTS_UNLOCKED:
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, true, val[0]);

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          DECL_FUNCTION_CODE (callee));

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          DECL_FUNCTION_CODE (callee));

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));

  /* An ignored result may still carry side effects; strip the value.  */
  if (result && ignore)
    result = fold_ignored_result (result);
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  If a call to the function must be accompanied
   with a this adjustment, the constant which should be added to this pointer
   is stored to *DELTA.  If REFUSE_THUNKS is true, return NULL if the function
   is a thunk (other than a this adjustment which is dealt with by DELTA).  */

gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
  v = BINFO_VIRTUALS (known_binfo);
  /* If there is no virtual methods leave the OBJ_TYPE_REF alone.  */

  /* Walk the BINFO_VIRTUALS list to the TOKEN-th entry; with vtable
     descriptors each entry occupies several slots.  */
  i += (TARGET_VTABLE_USES_DESCRIPTORS
        ? TARGET_VTABLE_USES_DESCRIPTORS : 1);

  /* If BV_VCALL_INDEX is non-NULL, give up.  */

  fndecl = TREE_VALUE (v);

  /* When cgraph node is missing and function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in other partition, because we found devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v)))

  /* The this adjustment is the TREE_PURPOSE of the vtable entry.  */
  *delta = TREE_PURPOSE (v);
  gcc_checking_assert (host_integerp (*delta, 0));
/* Generate code adjusting the first parameter of a call statement determined
   by GSI by DELTA.  */

gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
  gimple call_stmt = gsi_stmt (*gsi);

  delta = convert_to_ptrofftype (delta);
  gcc_assert (gimple_call_num_args (call_stmt) >= 1);
  parm = gimple_call_arg (call_stmt, 0);
  gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
  /* Build a fresh SSA temporary holding PARM + DELTA and insert the
     computation before the call.  */
  tmp = create_tmp_var (TREE_TYPE (parm), NULL);
  add_referenced_var (tmp);

  tmp = make_ssa_name (tmp, NULL);
  new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
  SSA_NAME_DEF_STMT (tmp) = new_stmt;
  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
  /* Redirect the call's this argument to the adjusted pointer.  */
  gimple_call_set_arg (call_stmt, 0, tmp);
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).  */

gimple_extract_devirt_binfo_from_cst (tree cst)
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, expected_type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)

  cst = TREE_OPERAND (cst, 0);
  expected_type = TREE_TYPE (cst);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);

      || TREE_CODE (type) != RECORD_TYPE)

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
      HOST_WIDE_INT pos, size;

      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))

      /* Locate the field that covers OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
          if (TREE_CODE (fld) != FIELD_DECL)

          pos = int_bit_position (fld);
          size = tree_low_cst (DECL_SIZE (fld), 1);
          if (pos <= offset && (pos + size) > offset)

      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)

      /* Descend into the covering field and continue the search.  */
      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);

  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
  binfo = TYPE_BINFO (type);
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
  gimple stmt = gsi_stmt (*gsi);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (!inplace && callee && DECL_BUILT_IN (callee))
      tree result = gimple_fold_builtin (stmt);
          /* Fall back to gimplifying the folded expression when it
             cannot directly replace the call.  */
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
      tree binfo, fndecl, delta, obj;
      HOST_WIDE_INT token;

      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
          gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));

      /* Try devirtualization through the static type of the object.  */
      obj = OBJ_TYPE_REF_OBJECT (callee);
      binfo = gimple_extract_devirt_binfo_from_cst (obj);
      token = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
      fndecl = gimple_get_virt_method_for_binfo (token, binfo, &delta);
      gcc_assert (integer_zerop (delta));
      gimple_call_set_fndecl (stmt, fndecl);
1174 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1175 distinguishes both cases. */
1178 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1180 bool changed = false;
1181 gimple stmt = gsi_stmt (*gsi);
1183 gimple_stmt_iterator gsinext = *gsi;
1186 gsi_next (&gsinext);
1187 next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
1189 /* Fold the main computation performed by the statement. */
1190 switch (gimple_code (stmt))
1194 unsigned old_num_ops = gimple_num_ops (stmt);
1195 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1196 tree lhs = gimple_assign_lhs (stmt);
1198 /* First canonicalize operand order. This avoids building new
1199 trees if this is the only thing fold would later do. */
1200 if ((commutative_tree_code (subcode)
1201 || commutative_ternary_tree_code (subcode))
1202 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1203 gimple_assign_rhs2 (stmt), false))
1205 tree tem = gimple_assign_rhs1 (stmt);
1206 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1207 gimple_assign_set_rhs2 (stmt, tem);
1210 new_rhs = fold_gimple_assign (gsi);
1212 && !useless_type_conversion_p (TREE_TYPE (lhs),
1213 TREE_TYPE (new_rhs)))
1214 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1217 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1219 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1226 changed |= fold_gimple_cond (stmt);
1230 /* Fold *& in call arguments. */
1231 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1232 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1234 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1237 gimple_call_set_arg (stmt, i, tmp);
1241 changed |= gimple_fold_call (gsi, inplace);
1245 /* Fold *& in asm operands. */
1246 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1248 tree link = gimple_asm_output_op (stmt, i);
1249 tree op = TREE_VALUE (link);
1250 if (REFERENCE_CLASS_P (op)
1251 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1253 TREE_VALUE (link) = op;
1257 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1259 tree link = gimple_asm_input_op (stmt, i);
1260 tree op = TREE_VALUE (link);
1261 if (REFERENCE_CLASS_P (op)
1262 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
1264 TREE_VALUE (link) = op;
1271 if (gimple_debug_bind_p (stmt))
1273 tree val = gimple_debug_bind_get_value (stmt);
1275 && REFERENCE_CLASS_P (val))
1277 tree tem = maybe_fold_reference (val, false);
1280 gimple_debug_bind_set_value (stmt, tem);
1290 /* If stmt folds into nothing and it was the last stmt in a bb,
1291 don't call gsi_stmt. */
1292 if (gsi_end_p (*gsi))
1294 gcc_assert (next_stmt == NULL);
1298 stmt = gsi_stmt (*gsi);
1300 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1301 as we'd be changing the next stmt. */
1302 if (gimple_has_lhs (stmt) && stmt != next_stmt)
1304 tree lhs = gimple_get_lhs (stmt);
1305 if (lhs && REFERENCE_CLASS_P (lhs))
1307 tree new_lhs = maybe_fold_reference (lhs, true);
1310 gimple_set_lhs (stmt, new_lhs);
1319 /* Fold the statement pointed to by GSI. In some cases, this function may
1320 replace the whole statement with a new one. Returns true iff folding
made any changes.
1322 The statement pointed to by GSI should be in valid gimple form but may
1323 be in unfolded state as resulting from for example constant propagation
1324 which can produce *&x = 0. */
/* Public entry point: folds with inplace == false, so fold_stmt_1 is
   allowed to replace the statement entirely. */
1327 fold_stmt (gimple_stmt_iterator *gsi)
1329 return fold_stmt_1 (gsi, false);
1332 /* Perform the minimal folding on statement STMT. Only operations like
1333 *&x created by constant propagation are handled. The statement cannot
1334 be replaced with a new one. Return true if the statement was
1335 changed, false otherwise.
1336 The statement STMT should be in valid gimple form but may
1337 be in unfolded state as resulting from for example constant propagation
1338 which can produce *&x = 0. */
1341 fold_stmt_inplace (gimple stmt)
1343 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1344 bool changed = fold_stmt_1 (&gsi, true);
/* In-place folding must not have replaced the statement; the iterator
   still has to point at STMT. */
1345 gcc_assert (gsi_stmt (gsi) == stmt);
1349 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1350 if EXPR is null or we don't know how.
1351 If non-null, the result always has boolean type. */
1354 canonicalize_bool (tree expr, bool invert)
/* Inverted case: build the logical negation of EXPR (constants flip,
   an SSA name becomes NAME == 0, a comparison gets its code inverted). */
1360 if (integer_nonzerop (expr))
1361 return boolean_false_node;
1362 else if (integer_zerop (expr))
1363 return boolean_true_node;
1364 else if (TREE_CODE (expr) == SSA_NAME)
1365 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1366 build_int_cst (TREE_TYPE (expr), 0));
1367 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1368 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1370 TREE_OPERAND (expr, 0),
1371 TREE_OPERAND (expr, 1));
/* Non-inverted case: re-express EXPR so that the result has boolean
   type (constants map to the boolean nodes, an SSA name becomes
   NAME != 0, a comparison is rebuilt with boolean type). */
1377 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1379 if (integer_nonzerop (expr))
1380 return boolean_true_node;
1381 else if (integer_zerop (expr))
1382 return boolean_false_node;
1383 else if (TREE_CODE (expr) == SSA_NAME)
1384 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1385 build_int_cst (TREE_TYPE (expr), 0));
1386 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1387 return fold_build2 (TREE_CODE (expr),
1389 TREE_OPERAND (expr, 0),
1390 TREE_OPERAND (expr, 1));
1396 /* Check to see if a boolean expression EXPR is logically equivalent to the
1397 comparison (OP1 CODE OP2). Check for various identities involving
SSA_NAMEs. */
1401 same_bool_comparison_p (const_tree expr, enum tree_code code,
1402 const_tree op1, const_tree op2)
1406 /* The obvious case. */
1407 if (TREE_CODE (expr) == code
1408 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1409 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1412 /* Check for comparing (name, name != 0) and the case where expr
1413 is an SSA_NAME with a definition matching the comparison. */
1414 if (TREE_CODE (expr) == SSA_NAME
1415 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1417 if (operand_equal_p (expr, op1, 0))
1418 return ((code == NE_EXPR && integer_zerop (op2))
1419 || (code == EQ_EXPR && integer_nonzerop (op2)));
1420 s = SSA_NAME_DEF_STMT (expr);
1421 if (is_gimple_assign (s)
1422 && gimple_assign_rhs_code (s) == code
1423 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1424 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1428 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1429 of name is a comparison, recurse. */
1430 if (TREE_CODE (op1) == SSA_NAME
1431 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1433 s = SSA_NAME_DEF_STMT (op1);
1434 if (is_gimple_assign (s)
1435 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1437 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) and (name == 1) are equivalent to the comparison
   defining name, so recurse with the definition's operands. */
1438 if ((c == NE_EXPR && integer_zerop (op2))
1439 || (c == EQ_EXPR && integer_nonzerop (op2)))
1440 return same_bool_comparison_p (expr, c,
1441 gimple_assign_rhs1 (s),
1442 gimple_assign_rhs2 (s));
/* (name == 0) and (name != 1) are the inverted comparison. */
1443 if ((c == EQ_EXPR && integer_zerop (op2))
1444 || (c == NE_EXPR && integer_nonzerop (op2)))
1445 return same_bool_comparison_p (expr,
1446 invert_tree_comparison (c, false),
1447 gimple_assign_rhs1 (s),
1448 gimple_assign_rhs2 (s));
1454 /* Check to see if two boolean expressions OP1 and OP2 are logically
equivalent. */
1458 same_bool_result_p (const_tree op1, const_tree op2)
1460 /* Simple cases first. */
1461 if (operand_equal_p (op1, op2, 0))
1464 /* Check the cases where at least one of the operands is a comparison.
1465 These are a bit smarter than operand_equal_p in that they apply some
1466 identities on SSA_NAMEs. */
1467 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1468 && same_bool_comparison_p (op1, TREE_CODE (op2),
1469 TREE_OPERAND (op2, 0),
1470 TREE_OPERAND (op2, 1)))
1472 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1473 && same_bool_comparison_p (op2, TREE_CODE (op1),
1474 TREE_OPERAND (op1, 0),
1475 TREE_OPERAND (op1, 1)))
1482 /* Forward declarations for some mutually recursive functions. */
1485 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1486 enum tree_code code2, tree op2a, tree op2b);
1488 and_var_with_comparison (tree var, bool invert,
1489 enum tree_code code2, tree op2a, tree op2b);
1491 and_var_with_comparison_1 (gimple stmt,
1492 enum tree_code code2, tree op2a, tree op2b);
1494 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1495 enum tree_code code2, tree op2a, tree op2b);
1497 or_var_with_comparison (tree var, bool invert,
1498 enum tree_code code2, tree op2a, tree op2b);
1500 or_var_with_comparison_1 (gimple stmt,
1501 enum tree_code code2, tree op2a, tree op2b);
1503 /* Helper function for and_comparisons_1: try to simplify the AND of the
1504 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1505 If INVERT is true, invert the value of the VAR before doing the AND.
1506 Return NULL_EXPR if we can't simplify this to a single expression. */
1509 and_var_with_comparison (tree var, bool invert,
1510 enum tree_code code2, tree op2a, tree op2b)
1513 gimple stmt = SSA_NAME_DEF_STMT (var);
1515 /* We can only deal with variables whose definitions are assignments. */
1516 if (!is_gimple_assign (stmt))
1519 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1520 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1521 Then we only have to consider the simpler non-inverted cases. */
1523 t = or_var_with_comparison_1 (stmt,
1524 invert_tree_comparison (code2, false),
1527 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* Coerce the simplified result to boolean type, applying the INVERT
   requested by the caller (undoes the DeMorgan rewrite above). */
1528 return canonicalize_bool (t, invert);
1531 /* Try to simplify the AND of the ssa variable defined by the assignment
1532 STMT with the comparison specified by (OP2A CODE2 OP2B).
1533 Return NULL_EXPR if we can't simplify this to a single expression. */
1536 and_var_with_comparison_1 (gimple stmt,
1537 enum tree_code code2, tree op2a, tree op2b)
1539 tree var = gimple_assign_lhs (stmt);
1540 tree true_test_var = NULL_TREE;
1541 tree false_test_var = NULL_TREE;
1542 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1544 /* Check for identities like (var AND (var == 0)) => false. */
1545 if (TREE_CODE (op2a) == SSA_NAME
1546 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1548 if ((code2 == NE_EXPR && integer_zerop (op2b))
1549 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1551 true_test_var = op2a;
1552 if (var == true_test_var)
1555 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1556 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1558 false_test_var = op2a;
1559 if (var == false_test_var)
1560 return boolean_false_node;
1564 /* If the definition is a comparison, recurse on it. */
1565 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1567 tree t = and_comparisons_1 (innercode,
1568 gimple_assign_rhs1 (stmt),
1569 gimple_assign_rhs2 (stmt),
1577 /* If the definition is an AND or OR expression, we may be able to
1578 simplify by reassociating. */
1579 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1580 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1582 tree inner1 = gimple_assign_rhs1 (stmt);
1583 tree inner2 = gimple_assign_rhs2 (stmt);
1586 tree partial = NULL_TREE;
1587 bool is_and = (innercode == BIT_AND_EXPR);
1589 /* Check for boolean identities that don't require recursive examination
of inner1/inner2:
1591 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1592 inner1 AND (inner1 OR inner2) => inner1
1593 !inner1 AND (inner1 AND inner2) => false
1594 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1595 Likewise for similar cases involving inner2. */
1596 if (inner1 == true_test_var)
1597 return (is_and ? var : inner1);
1598 else if (inner2 == true_test_var)
1599 return (is_and ? var : inner2);
1600 else if (inner1 == false_test_var)
1602 ? boolean_false_node
1603 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1604 else if (inner2 == false_test_var)
1606 ? boolean_false_node
1607 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1609 /* Next, redistribute/reassociate the AND across the inner tests.
1610 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1611 if (TREE_CODE (inner1) == SSA_NAME
1612 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1613 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1614 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1615 gimple_assign_rhs1 (s),
1616 gimple_assign_rhs2 (s),
1617 code2, op2a, op2b)))
1619 /* Handle the AND case, where we are reassociating:
1620 (inner1 AND inner2) AND (op2a code2 op2b)
1622 If the partial result t is a constant, we win. Otherwise
1623 continue on to try reassociating with the other inner test. */
1626 if (integer_onep (t))
1628 else if (integer_zerop (t))
1629 return boolean_false_node;
1632 /* Handle the OR case, where we are redistributing:
1633 (inner1 OR inner2) AND (op2a code2 op2b)
1634 => (t OR (inner2 AND (op2a code2 op2b))) */
1635 else if (integer_onep (t))
1636 return boolean_true_node;
1638 /* Save partial result for later. */
1642 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1643 if (TREE_CODE (inner2) == SSA_NAME
1644 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1645 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1646 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1647 gimple_assign_rhs1 (s),
1648 gimple_assign_rhs2 (s),
1649 code2, op2a, op2b)))
1651 /* Handle the AND case, where we are reassociating:
1652 (inner1 AND inner2) AND (op2a code2 op2b)
1653 => (inner1 AND t) */
1656 if (integer_onep (t))
1658 else if (integer_zerop (t))
1659 return boolean_false_node;
1660 /* If both are the same, we can apply the identity
(x AND x) == x. */
1662 else if (partial && same_bool_result_p (t, partial))
1666 /* Handle the OR case, where we are redistributing:
1667 (inner1 OR inner2) AND (op2a code2 op2b)
1668 => (t OR (inner1 AND (op2a code2 op2b)))
1669 => (t OR partial) */
1672 if (integer_onep (t))
1673 return boolean_true_node;
1676 /* We already got a simplification for the other
1677 operand to the redistributed OR expression. The
1678 interesting case is when at least one is false.
1679 Or, if both are the same, we can apply the identity
(x OR x) == x. */
1681 if (integer_zerop (partial))
1683 else if (integer_zerop (t))
1685 else if (same_bool_result_p (t, partial))
1694 /* Try to simplify the AND of two comparisons defined by
1695 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1696 If this can be done without constructing an intermediate value,
1697 return the resulting tree; otherwise NULL_TREE is returned.
1698 This function is deliberately asymmetric as it recurses on SSA_DEFs
1699 in the first comparison but not the second. */
1702 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1703 enum tree_code code2, tree op2a, tree op2b)
1705 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1706 if (operand_equal_p (op1a, op2a, 0)
1707 && operand_equal_p (op1b, op2b, 0))
1709 /* Result will be either NULL_TREE, or a combined comparison. */
1710 tree t = combine_comparisons (UNKNOWN_LOCATION,
1711 TRUTH_ANDIF_EXPR, code1, code2,
1712 boolean_type_node, op1a, op1b);
1717 /* Likewise the swapped case of the above. */
1718 if (operand_equal_p (op1a, op2b, 0)
1719 && operand_equal_p (op1b, op2a, 0))
1721 /* Result will be either NULL_TREE, or a combined comparison. */
1722 tree t = combine_comparisons (UNKNOWN_LOCATION,
1723 TRUTH_ANDIF_EXPR, code1,
1724 swap_tree_comparison (code2),
1725 boolean_type_node, op1a, op1b);
1730 /* If both comparisons are of the same value against constants, we might
1731 be able to merge them. */
1732 if (operand_equal_p (op1a, op2a, 0)
1733 && TREE_CODE (op1b) == INTEGER_CST
1734 && TREE_CODE (op2b) == INTEGER_CST)
1736 int cmp = tree_int_cst_compare (op1b, op2b);
1738 /* If we have (op1a == op1b), we should either be able to
1739 return that or FALSE, depending on whether the constant op1b
1740 also satisfies the other comparison against op2b. */
1741 if (code1 == EQ_EXPR)
1747 case EQ_EXPR: val = (cmp == 0); break;
1748 case NE_EXPR: val = (cmp != 0); break;
1749 case LT_EXPR: val = (cmp < 0); break;
1750 case GT_EXPR: val = (cmp > 0); break;
1751 case LE_EXPR: val = (cmp <= 0); break;
1752 case GE_EXPR: val = (cmp >= 0); break;
1753 default: done = false;
1758 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1760 return boolean_false_node;
1763 /* Likewise if the second comparison is an == comparison. */
1764 else if (code2 == EQ_EXPR)
1770 case EQ_EXPR: val = (cmp == 0); break;
1771 case NE_EXPR: val = (cmp != 0); break;
1772 case LT_EXPR: val = (cmp > 0); break;
1773 case GT_EXPR: val = (cmp < 0); break;
1774 case LE_EXPR: val = (cmp >= 0); break;
1775 case GE_EXPR: val = (cmp <= 0); break;
1776 default: done = false;
1781 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1783 return boolean_false_node;
1787 /* Same business with inequality tests. */
1788 else if (code1 == NE_EXPR)
1793 case EQ_EXPR: val = (cmp != 0); break;
1794 case NE_EXPR: val = (cmp == 0); break;
1795 case LT_EXPR: val = (cmp >= 0); break;
1796 case GT_EXPR: val = (cmp <= 0); break;
1797 case LE_EXPR: val = (cmp > 0); break;
1798 case GE_EXPR: val = (cmp < 0); break;
1803 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1805 else if (code2 == NE_EXPR)
1810 case EQ_EXPR: val = (cmp == 0); break;
1811 case NE_EXPR: val = (cmp != 0); break;
1812 case LT_EXPR: val = (cmp <= 0); break;
1813 case GT_EXPR: val = (cmp >= 0); break;
1814 case LE_EXPR: val = (cmp < 0); break;
1815 case GE_EXPR: val = (cmp > 0); break;
1820 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1823 /* Choose the more restrictive of two < or <= comparisons. */
1824 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1825 && (code2 == LT_EXPR || code2 == LE_EXPR))
1827 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1828 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1830 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1833 /* Likewise choose the more restrictive of two > or >= comparisons. */
1834 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1835 && (code2 == GT_EXPR || code2 == GE_EXPR))
1837 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1838 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1840 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1843 /* Check for singleton ranges. */
1845 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1846 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1847 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b)
1849 /* Check for disjoint ranges. */
1851 && (code1 == LT_EXPR || code1 == LE_EXPR)
1852 && (code2 == GT_EXPR || code2 == GE_EXPR))
1853 return boolean_false_node;
1855 && (code1 == GT_EXPR || code1 == GE_EXPR)
1856 && (code2 == LT_EXPR || code2 == LE_EXPR))
1857 return boolean_false_node;
1860 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1861 NAME's definition is a truth value. See if there are any simplifications
1862 that can be done against the NAME's definition. */
1863 if (TREE_CODE (op1a) == SSA_NAME
1864 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1865 && (integer_zerop (op1b) || integer_onep (op1b)))
1867 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1868 || (code1 == NE_EXPR && integer_onep (op1b)));
1869 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1870 switch (gimple_code (stmt))
1873 /* Try to simplify by copy-propagating the definition. */
1874 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1877 /* If every argument to the PHI produces the same result when
1878 ANDed with the second comparison, we win.
1879 Do not do this unless the type is bool since we need a bool
1880 result here anyway. */
1881 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1883 tree result = NULL_TREE;
1885 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1887 tree arg = gimple_phi_arg_def (stmt, i);
1889 /* If this PHI has itself as an argument, ignore it.
1890 If all the other args produce the same result,
return that. */
1892 if (arg == gimple_phi_result (stmt))
1894 else if (TREE_CODE (arg) == INTEGER_CST)
1896 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1899 result = boolean_false_node;
1900 else if (!integer_zerop (result))
1904 result = fold_build2 (code2, boolean_type_node,
1906 else if (!same_bool_comparison_p (result,
1910 else if (TREE_CODE (arg) == SSA_NAME
1911 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1914 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1915 /* In simple cases we can look through PHI nodes,
1916 but we have to be careful with loops.
Only look through the argument's definition when
dominance info shows it cannot come from a back edge. */
1918 if (! dom_info_available_p (CDI_DOMINATORS)
1919 || gimple_bb (def_stmt) == gimple_bb (stmt)
1920 || dominated_by_p (CDI_DOMINATORS,
1921 gimple_bb (def_stmt),
1924 temp = and_var_with_comparison (arg, invert, code2,
1930 else if (!same_bool_result_p (result, temp))
1946 /* Try to simplify the AND of two comparisons, specified by
1947 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1948 If this can be simplified to a single expression (without requiring
1949 introducing more SSA variables to hold intermediate values),
1950 return the resulting tree. Otherwise return NULL_TREE.
1951 If the result expression is non-null, it has boolean type. */
1954 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1955 enum tree_code code2, tree op2a, tree op2b)
1957 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
/* and_comparisons_1 is deliberately asymmetric (it only recurses on
   SSA defs of the first comparison), so retry with the operands
   commuted. */
1961 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1964 /* Helper function for or_comparisons_1: try to simplify the OR of the
1965 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1966 If INVERT is true, invert the value of VAR before doing the OR.
1967 Return NULL_EXPR if we can't simplify this to a single expression. */
1970 or_var_with_comparison (tree var, bool invert,
1971 enum tree_code code2, tree op2a, tree op2b)
1974 gimple stmt = SSA_NAME_DEF_STMT (var);
1976 /* We can only deal with variables whose definitions are assignments. */
1977 if (!is_gimple_assign (stmt))
1980 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1981 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1982 Then we only have to consider the simpler non-inverted cases. */
1984 t = and_var_with_comparison_1 (stmt,
1985 invert_tree_comparison (code2, false),
1988 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* Coerce the simplified result to boolean type, applying the INVERT
   requested by the caller (undoes the DeMorgan rewrite above). */
1989 return canonicalize_bool (t, invert);
1992 /* Try to simplify the OR of the ssa variable defined by the assignment
1993 STMT with the comparison specified by (OP2A CODE2 OP2B).
1994 Return NULL_EXPR if we can't simplify this to a single expression. */
1997 or_var_with_comparison_1 (gimple stmt,
1998 enum tree_code code2, tree op2a, tree op2b)
2000 tree var = gimple_assign_lhs (stmt);
2001 tree true_test_var = NULL_TREE;
2002 tree false_test_var = NULL_TREE;
2003 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2005 /* Check for identities like (var OR (var != 0)) => true. */
2006 if (TREE_CODE (op2a) == SSA_NAME
2007 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2009 if ((code2 == NE_EXPR && integer_zerop (op2b))
2010 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2012 true_test_var = op2a;
2013 if (var == true_test_var)
2016 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2017 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2019 false_test_var = op2a;
2020 if (var == false_test_var)
2021 return boolean_true_node;
2025 /* If the definition is a comparison, recurse on it. */
2026 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2028 tree t = or_comparisons_1 (innercode,
2029 gimple_assign_rhs1 (stmt),
2030 gimple_assign_rhs2 (stmt),
2038 /* If the definition is an AND or OR expression, we may be able to
2039 simplify by reassociating. */
2040 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2041 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2043 tree inner1 = gimple_assign_rhs1 (stmt);
2044 tree inner2 = gimple_assign_rhs2 (stmt);
2047 tree partial = NULL_TREE;
2048 bool is_or = (innercode == BIT_IOR_EXPR);
2050 /* Check for boolean identities that don't require recursive examination
of inner1/inner2:
2052 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2053 inner1 OR (inner1 AND inner2) => inner1
2054 !inner1 OR (inner1 OR inner2) => true
2055 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
Likewise for similar cases involving inner2. */
2057 if (inner1 == true_test_var)
2058 return (is_or ? var : inner1);
2059 else if (inner2 == true_test_var)
2060 return (is_or ? var : inner2);
2061 else if (inner1 == false_test_var)
2064 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2065 else if (inner2 == false_test_var)
2068 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2070 /* Next, redistribute/reassociate the OR across the inner tests.
2071 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2072 if (TREE_CODE (inner1) == SSA_NAME
2073 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2074 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2075 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2076 gimple_assign_rhs1 (s),
2077 gimple_assign_rhs2 (s),
2078 code2, op2a, op2b)))
2080 /* Handle the OR case, where we are reassociating:
2081 (inner1 OR inner2) OR (op2a code2 op2b)
2083 If the partial result t is a constant, we win. Otherwise
2084 continue on to try reassociating with the other inner test. */
2087 if (integer_onep (t))
2088 return boolean_true_node;
2089 else if (integer_zerop (t))
2093 /* Handle the AND case, where we are redistributing:
2094 (inner1 AND inner2) OR (op2a code2 op2b)
2095 => (t AND (inner2 OR (op2a code op2b))) */
2096 else if (integer_zerop (t))
2097 return boolean_false_node;
2099 /* Save partial result for later. */
2103 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2104 if (TREE_CODE (inner2) == SSA_NAME
2105 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2106 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2107 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2108 gimple_assign_rhs1 (s),
2109 gimple_assign_rhs2 (s),
2110 code2, op2a, op2b)))
2112 /* Handle the OR case, where we are reassociating:
2113 (inner1 OR inner2) OR (op2a code2 op2b)
2115 => (t OR partial) */
2118 if (integer_zerop (t))
2120 else if (integer_onep (t))
2121 return boolean_true_node;
2122 /* If both are the same, we can apply the identity
(x OR x) == x. */
2124 else if (partial && same_bool_result_p (t, partial))
2128 /* Handle the AND case, where we are redistributing:
2129 (inner1 AND inner2) OR (op2a code2 op2b)
2130 => (t AND (inner1 OR (op2a code2 op2b)))
2131 => (t AND partial) */
2134 if (integer_zerop (t))
2135 return boolean_false_node;
2138 /* We already got a simplification for the other
2139 operand to the redistributed AND expression. The
2140 interesting case is when at least one is true.
2141 Or, if both are the same, we can apply the identity
(x AND x) == x. */
2143 if (integer_onep (partial))
2145 else if (integer_onep (t))
2147 else if (same_bool_result_p (t, partial))
2156 /* Try to simplify the OR of two comparisons defined by
2157 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2158 If this can be done without constructing an intermediate value,
2159 return the resulting tree; otherwise NULL_TREE is returned.
2160 This function is deliberately asymmetric as it recurses on SSA_DEFs
2161 in the first comparison but not the second. */
2164 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2165 enum tree_code code2, tree op2a, tree op2b)
2167 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2168 if (operand_equal_p (op1a, op2a, 0)
2169 && operand_equal_p (op1b, op2b, 0))
2171 /* Result will be either NULL_TREE, or a combined comparison. */
2172 tree t = combine_comparisons (UNKNOWN_LOCATION,
2173 TRUTH_ORIF_EXPR, code1, code2,
2174 boolean_type_node, op1a, op1b);
2179 /* Likewise the swapped case of the above. */
2180 if (operand_equal_p (op1a, op2b, 0)
2181 && operand_equal_p (op1b, op2a, 0))
2183 /* Result will be either NULL_TREE, or a combined comparison. */
2184 tree t = combine_comparisons (UNKNOWN_LOCATION,
2185 TRUTH_ORIF_EXPR, code1,
2186 swap_tree_comparison (code2),
2187 boolean_type_node, op1a, op1b);
2192 /* If both comparisons are of the same value against constants, we might
2193 be able to merge them. */
2194 if (operand_equal_p (op1a, op2a, 0)
2195 && TREE_CODE (op1b) == INTEGER_CST
2196 && TREE_CODE (op2b) == INTEGER_CST)
2198 int cmp = tree_int_cst_compare (op1b, op2b);
2200 /* If we have (op1a != op1b), we should either be able to
2201 return that or TRUE, depending on whether the constant op1b
2202 also satisfies the other comparison against op2b. */
2203 if (code1 == NE_EXPR)
2209 case EQ_EXPR: val = (cmp == 0); break;
2210 case NE_EXPR: val = (cmp != 0); break;
2211 case LT_EXPR: val = (cmp < 0); break;
2212 case GT_EXPR: val = (cmp > 0); break;
2213 case LE_EXPR: val = (cmp <= 0); break;
2214 case GE_EXPR: val = (cmp >= 0); break;
2215 default: done = false;
2220 return boolean_true_node;
2222 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2225 /* Likewise if the second comparison is a != comparison. */
2226 else if (code2 == NE_EXPR)
2232 case EQ_EXPR: val = (cmp == 0); break;
2233 case NE_EXPR: val = (cmp != 0); break;
2234 case LT_EXPR: val = (cmp > 0); break;
2235 case GT_EXPR: val = (cmp < 0); break;
2236 case LE_EXPR: val = (cmp >= 0); break;
2237 case GE_EXPR: val = (cmp <= 0); break;
2238 default: done = false;
2243 return boolean_true_node;
2245 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2249 /* See if an equality test is redundant with the other comparison. */
2250 else if (code1 == EQ_EXPR)
2255 case EQ_EXPR: val = (cmp == 0); break;
2256 case NE_EXPR: val = (cmp != 0); break;
2257 case LT_EXPR: val = (cmp < 0); break;
2258 case GT_EXPR: val = (cmp > 0); break;
2259 case LE_EXPR: val = (cmp <= 0); break;
2260 case GE_EXPR: val = (cmp >= 0); break;
2265 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2267 else if (code2 == EQ_EXPR)
2272 case EQ_EXPR: val = (cmp == 0); break;
2273 case NE_EXPR: val = (cmp != 0); break;
2274 case LT_EXPR: val = (cmp > 0); break;
2275 case GT_EXPR: val = (cmp < 0); break;
2276 case LE_EXPR: val = (cmp >= 0); break;
2277 case GE_EXPR: val = (cmp <= 0); break;
2282 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2285 /* Choose the less restrictive of two < or <= comparisons. */
2286 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2287 && (code2 == LT_EXPR || code2 == LE_EXPR))
2289 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2290 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2292 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2295 /* Likewise choose the less restrictive of two > or >= comparisons. */
2296 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2297 && (code2 == GT_EXPR || code2 == GE_EXPR))
2299 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2300 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2302 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2305 /* Check for singleton ranges. */
2307 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2308 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2309 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2311 /* Check for less/greater pairs that don't restrict the range at all. */
2313 && (code1 == LT_EXPR || code1 == LE_EXPR)
2314 && (code2 == GT_EXPR || code2 == GE_EXPR))
2315 return boolean_true_node;
2317 && (code1 == GT_EXPR || code1 == GE_EXPR)
2318 && (code2 == LT_EXPR || code2 == LE_EXPR))
2319 return boolean_true_node;
2322 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2323 NAME's definition is a truth value. See if there are any simplifications
2324 that can be done against the NAME's definition. */
2325 if (TREE_CODE (op1a) == SSA_NAME
2326 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2327 && (integer_zerop (op1b) || integer_onep (op1b)))
2329 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2330 || (code1 == NE_EXPR && integer_onep (op1b)));
2331 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2332 switch (gimple_code (stmt))
2335 /* Try to simplify by copy-propagating the definition. */
2336 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2339 /* If every argument to the PHI produces the same result when
2340 ORed with the second comparison, we win.
2341 Do not do this unless the type is bool since we need a bool
2342 result here anyway. */
2343 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2345 tree result = NULL_TREE;
2347 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2349 tree arg = gimple_phi_arg_def (stmt, i);
2351 /* If this PHI has itself as an argument, ignore it.
2352 If all the other args produce the same result,
return that. */
2354 if (arg == gimple_phi_result (stmt))
2356 else if (TREE_CODE (arg) == INTEGER_CST)
2358 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2361 result = boolean_true_node;
2362 else if (!integer_onep (result))
2366 result = fold_build2 (code2, boolean_type_node,
2368 else if (!same_bool_comparison_p (result,
2372 else if (TREE_CODE (arg) == SSA_NAME
2373 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2376 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2377 /* In simple cases we can look through PHI nodes,
2378 but we have to be careful with loops.
Only look through the argument's definition when
dominance info shows it cannot come from a back edge. */
2380 if (! dom_info_available_p (CDI_DOMINATORS)
2381 || gimple_bb (def_stmt) == gimple_bb (stmt)
2382 || dominated_by_p (CDI_DOMINATORS,
2383 gimple_bb (def_stmt),
2386 temp = or_var_with_comparison (arg, invert, code2,
2392 else if (!same_bool_result_p (result, temp))
2408 /* Try to simplify the OR of two comparisons, specified by
2409 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2410 If this can be simplified to a single expression (without requiring
2411 introducing more SSA variables to hold intermediate values),
2412 return the resulting tree. Otherwise return NULL_TREE.
2413 If the result expression is non-null, it has boolean type. */
2416 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2417 enum tree_code code2, tree op2a, tree op2b)
2419 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
/* or_comparisons_1 is deliberately asymmetric (it only recurses on
   SSA defs of the first comparison), so retry with the operands
   commuted. */
2423 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2427 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2429 Either NULL_TREE, a simplified but non-constant, or a constant is returned.
2432 ??? This should go into a gimple-fold-inline.h file to be eventually
2433 privatized with the single valueize function used in the various TUs
2434 to avoid the indirect function call overhead. */
2437 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
/* Fold STMT to a constant, mapping every SSA name operand through
   VALUEIZE first.  Dispatches on the statement kind and, for
   assignments, on the RHS class (single/unary/binary/ternary).
   NOTE(review): interior lines of this function appear to be missing
   from this chunk (the embedded line numbers jump); comments below
   describe only the code that is visible here.  */
2439 location_t loc = gimple_location (stmt);
2440 switch (gimple_code (stmt))
2444 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2446 switch (get_gimple_rhs_class (subcode))
2448 case GIMPLE_SINGLE_RHS:
2450 tree rhs = gimple_assign_rhs1 (stmt);
2451 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2453 if (TREE_CODE (rhs) == SSA_NAME)
2455 /* If the RHS is an SSA_NAME, return its known constant value,
2457 return (*valueize) (rhs);
2459 /* Handle propagating invariant addresses into address
2461 else if (TREE_CODE (rhs) == ADDR_EXPR
2462 && !is_gimple_min_invariant (rhs))
2464 HOST_WIDE_INT offset;
2466 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2470 && (CONSTANT_CLASS_P (base)
2471 || decl_address_invariant_p (base)))
2472 return build_invariant_address (TREE_TYPE (rhs),
/* A CONSTRUCTOR of a vector type with one value per element folds to
   a VECTOR_CST when every valueized element is a constant.  */
2475 else if (TREE_CODE (rhs) == CONSTRUCTOR
2476 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2477 && (CONSTRUCTOR_NELTS (rhs)
2478 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2484 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2486 val = (*valueize) (val);
2487 if (TREE_CODE (val) == INTEGER_CST
2488 || TREE_CODE (val) == REAL_CST
2489 || TREE_CODE (val) == FIXED_CST)
2490 list = tree_cons (NULL_TREE, val, list);
/* tree_cons built the list in reverse; restore source order.  */
2495 return build_vector (TREE_TYPE (rhs), nreverse (list));
2498 if (kind == tcc_reference)
/* References whose single operand is an SSA name: valueize the
   operand and refold the wrapping expression.  */
2500 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2501 || TREE_CODE (rhs) == REALPART_EXPR
2502 || TREE_CODE (rhs) == IMAGPART_EXPR)
2503 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2505 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2506 return fold_unary_loc (EXPR_LOCATION (rhs),
2508 TREE_TYPE (rhs), val);
2510 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2511 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2513 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2514 return fold_ternary_loc (EXPR_LOCATION (rhs),
2516 TREE_TYPE (rhs), val,
2517 TREE_OPERAND (rhs, 1),
2518 TREE_OPERAND (rhs, 2));
/* A load through a valueized invariant address can be folded by
   rebuilding the MEM_REF on top of the constant ADDR_EXPR.  */
2520 else if (TREE_CODE (rhs) == MEM_REF
2521 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2523 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2524 if (TREE_CODE (val) == ADDR_EXPR
2525 && is_gimple_min_invariant (val))
2527 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2529 TREE_OPERAND (rhs, 1));
/* Otherwise fall back to reading from a constant initializer.  */
2534 return fold_const_aggregate_ref_1 (rhs, valueize);
2536 else if (kind == tcc_declaration)
2537 return get_symbol_constant_value (rhs);
2541 case GIMPLE_UNARY_RHS:
2543 /* Handle unary operators that can appear in GIMPLE form.
2544 Note that we know the single operand must be a constant,
2545 so this should almost always return a simplified RHS. */
2546 tree lhs = gimple_assign_lhs (stmt);
2547 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2549 /* Conversions are useless for CCP purposes if they are
2550 value-preserving. Thus the restrictions that
2551 useless_type_conversion_p places for restrict qualification
2552 of pointer types should not apply here.
2553 Substitution later will only substitute to allowed places. */
2554 if (CONVERT_EXPR_CODE_P (subcode)
2555 && POINTER_TYPE_P (TREE_TYPE (lhs))
2556 && POINTER_TYPE_P (TREE_TYPE (op0))
2557 && (TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2558 == TYPE_ADDR_SPACE (TREE_TYPE (op0))))
/* Ignore overflow during folding so conversions of constants
   still simplify.  */
2562 fold_unary_ignore_overflow_loc (loc, subcode,
2563 gimple_expr_type (stmt), op0);
2566 case GIMPLE_BINARY_RHS:
2568 /* Handle binary operators that can appear in GIMPLE form. */
2569 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2570 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2572 /* Translate &x + CST into an invariant form suitable for
2573 further propagation. */
2574 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2575 && TREE_CODE (op0) == ADDR_EXPR
2576 && TREE_CODE (op1) == INTEGER_CST)
2578 tree off = fold_convert (ptr_type_node, op1);
2579 return build_fold_addr_expr_loc
2581 fold_build2 (MEM_REF,
2582 TREE_TYPE (TREE_TYPE (op0)),
2583 unshare_expr (op0), off));
2586 return fold_binary_loc (loc, subcode,
2587 gimple_expr_type (stmt), op0, op1);
2590 case GIMPLE_TERNARY_RHS:
2592 /* Handle ternary operators that can appear in GIMPLE form. */
2593 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2594 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2595 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2597 return fold_ternary_loc (loc, subcode,
2598 gimple_expr_type (stmt), op0, op1, op2);
/* Calls: only direct calls to built-in function decls with all
   arguments valueized are folded; internal calls are left alone.  */
2610 if (gimple_call_internal_p (stmt))
2611 /* No folding yet for these functions. */
2614 fn = (*valueize) (gimple_call_fn (stmt));
2615 if (TREE_CODE (fn) == ADDR_EXPR
2616 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2617 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2619 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2622 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2623 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2624 call = build_call_array_loc (loc,
2625 gimple_call_return_type (stmt),
2626 fn, gimple_call_num_args (stmt), args);
2627 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2629 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2630 STRIP_NOPS (retval);
2641 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2642 Returns NULL_TREE if folding to a constant is not possible, otherwise
2643 returns a constant according to is_gimple_min_invariant. */
2646 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2648 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2649 if (res && is_gimple_min_invariant (res))
2655 /* The following set of functions are supposed to fold references using
2656 their constant initializers. */
2658 static tree fold_ctor_reference (tree type, tree ctor,
2659 unsigned HOST_WIDE_INT offset,
2660 unsigned HOST_WIDE_INT size);
2662 /* See if we can find constructor defining value of BASE.
2663 When we know the constructor with constant offset (such as
2664 base is array[40] and we do know constructor of array), then
2665 BIT_OFFSET is adjusted accordingly.
2667 As a special case, return error_mark_node when constructor
2668 is not explicitly available, but it is known to be zero
2669 such as 'static const int a;'. */
2671 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2672 tree (*valueize)(tree))
/* Find the CONSTRUCTOR (or error_mark_node for a known-zero
   initializer) that provides the value stored at BASE, adjusting
   *BIT_OFFSET as the walk descends.  VALUEIZE, when non-NULL, maps
   SSA names to their known values.
   NOTE(review): some interior lines of this function are missing from
   this chunk (embedded line numbers jump).  */
2674 HOST_WIDE_INT bit_offset2, size, max_size;
/* For a MEM_REF, fold its constant byte offset into *BIT_OFFSET and
   strip down to the address operand.  */
2675 if (TREE_CODE (base) == MEM_REF)
2677 if (!integer_zerop (TREE_OPERAND (base, 1)))
2679 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2681 *bit_offset += (mem_ref_offset (base).low
2686 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2687 base = valueize (TREE_OPERAND (base, 0));
/* Only a (valueized) ADDR_EXPR lets us continue to the object.  */
2688 if (!base || TREE_CODE (base) != ADDR_EXPR)
2690 base = TREE_OPERAND (base, 0);
2693 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2694 DECL_INITIAL. If BASE is a nested reference into another
2695 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2696 the inner reference. */
2697 switch (TREE_CODE (base))
2700 if (!const_value_known_p (base))
/* A static/extern decl without an explicit initializer is known to
   be zero-initialized: signal that with error_mark_node.  */
2705 if (!DECL_INITIAL (base)
2706 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2707 return error_mark_node;
2708 return DECL_INITIAL (base);
/* Nested reference: compute its constant extent, accumulate the
   offset, and recurse on the inner base.  */
2712 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2713 if (max_size == -1 || size != max_size)
2715 *bit_offset += bit_offset2;
2716 return get_base_constructor (base, bit_offset, valueize);
2727 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2728 to the memory at bit OFFSET.
2730 We only do the simple job of folding byte accesses.
2733 fold_string_cst_ctor_reference (tree type, tree ctor,
2734 unsigned HOST_WIDE_INT offset,
2735 unsigned HOST_WIDE_INT size)
/* Fold a SIZE-bit read at bit OFFSET from STRING_CST CTOR to an
   integer constant of TYPE.  Only byte-aligned, single-byte reads of
   an integral TYPE whose mode matches the string's element mode are
   handled; anything else is left unfolded.  */
2737 if (INTEGRAL_TYPE_P (type)
2738 && (TYPE_MODE (type)
2739 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2740 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2742 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2743 && size == BITS_PER_UNIT
2744 && !(offset % BITS_PER_UNIT))
/* Convert the bit offset into a byte index into the string.  */
2746 offset /= BITS_PER_UNIT;
2747 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2748 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2751 const char a[20]="hello";
2754 might lead to offset greater than string length. In this case we
2755 know value is either initialized to 0 or out of bounds. Return 0
2757 return build_zero_cst (type);
2762 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2763 SIZE to the memory at bit OFFSET. */
2766 fold_array_ctor_reference (tree type, tree ctor,
2767 unsigned HOST_WIDE_INT offset,
2768 unsigned HOST_WIDE_INT size)
/* CTOR is a CONSTRUCTOR of an array type: fold the SIZE-bit read at
   bit OFFSET by locating the element whose index range covers the
   access and recursing into its value.  */
2770 unsigned HOST_WIDE_INT cnt;
2772 double_int low_bound, elt_size;
2773 double_int index, max_index;
2774 double_int access_index;
2775 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2776 HOST_WIDE_INT inner_offset;
2778 /* Compute low bound and elt size. */
2779 if (domain_type && TYPE_MIN_VALUE (domain_type))
2781 /* Static constructors for variably sized objects makes no sense. */
2782 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2783 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
/* Without a domain minimum, indices start at zero.  */
2786 low_bound = double_int_zero;
2787 /* Static constructors for variably sized objects makes no sense. */
2788 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2791 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2794 /* We can handle only constantly sized accesses that are known to not
2795 be larger than size of array element. */
2796 if (!TYPE_SIZE_UNIT (type)
2797 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2798 || double_int_cmp (elt_size,
2799 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
2802 /* Compute the array index we look for. */
2803 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
2804 elt_size, TRUNC_DIV_EXPR);
2805 access_index = double_int_add (access_index, low_bound);
2807 /* And offset within the access. */
2808 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
2810 /* See if the array field is large enough to span whole access. We do not
2811 care to fold accesses spanning multiple array indexes. */
2812 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
/* Start one below LOW_BOUND so an elt with a NULL index computes
   LOW_BOUND as its index on the first iteration.  */
2815 index = double_int_sub (low_bound, double_int_one);
2816 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2818 /* Array constructor might explicitely set index, or specify range
2819 or leave index NULL meaning that it is next index after previous
2823 if (TREE_CODE (cfield) == INTEGER_CST)
2824 max_index = index = tree_to_double_int (cfield)
2827 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2828 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2829 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2833 max_index = index = double_int_add (index, double_int_one);
2835 /* Do we have match? */
/* Last argument 1 selects unsigned double_int comparison.  */
2836 if (double_int_cmp (access_index, index, 1) >= 0
2837 && (double_int_cmp (access_index, max_index, 1) <= 0)
2838 return fold_ctor_reference (type, cval, inner_offset, size);
2840 /* When memory is not explicitely mentioned in constructor,
2841 it is 0 (or out of range). */
2842 return build_zero_cst (type);
2845 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2846 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2849 fold_nonarray_ctor_reference (tree type, tree ctor,
2850 unsigned HOST_WIDE_INT offset,
2851 unsigned HOST_WIDE_INT size)
/* CTOR is a CONSTRUCTOR of a record/union/vector: fold the SIZE-bit
   read at bit OFFSET by finding the field whose bit range overlaps
   the access and recursing into its value.  */
2853 unsigned HOST_WIDE_INT cnt;
2856 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2859 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2860 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2861 tree field_size = DECL_SIZE (cfield);
2862 double_int bitoffset;
2863 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2864 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
2865 double_int bitoffset_end, access_end;
2867 /* Variable sized objects in static constructors makes no sense,
2868 but field_size can be NULL for flexible array members. */
2869 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2870 && TREE_CODE (byte_offset) == INTEGER_CST
2871 && (field_size != NULL_TREE
2872 ? TREE_CODE (field_size) == INTEGER_CST
2873 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2875 /* Compute bit offset of the field. */
2876 bitoffset = double_int_add (tree_to_double_int (field_offset),
2877 double_int_mul (byte_offset_cst,
2878 bits_per_unit_cst));
2879 /* Compute bit offset where the field ends. */
2880 if (field_size != NULL_TREE)
2881 bitoffset_end = double_int_add (bitoffset,
2882 tree_to_double_int (field_size));
/* A flexible array member has no DECL_SIZE; treat its end as
   unbounded (zero end plus the NULL check below).  */
2884 bitoffset_end = double_int_zero;
2886 access_end = double_int_add (uhwi_to_double_int (offset),
2887 uhwi_to_double_int (size));
2889 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2890 [BITOFFSET, BITOFFSET_END)? */
2891 if (double_int_cmp (access_end, bitoffset, 0) > 0
2892 && (field_size == NULL_TREE
2893 || double_int_cmp (uhwi_to_double_int (offset),
2894 bitoffset_end, 0) < 0))
2896 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
2898 /* We do have overlap. Now see if field is large enough to
2899 cover the access. Give up for accesses spanning multiple
2901 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
2903 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
2905 return fold_ctor_reference (type, cval,
2906 double_int_to_uhwi (inner_offset), size);
2909 /* When memory is not explicitely mentioned in constructor, it is 0. */
2910 return build_zero_cst (type);
2913 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2914 to the memory at bit OFFSET. */
2917 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2918 unsigned HOST_WIDE_INT size)
/* Fold a SIZE-bit read of TYPE at bit OFFSET from the initializer
   CTOR.  Handles three shapes: an exact-type match, a same-size
   scalar that can be view-converted, and STRING_CST/CONSTRUCTOR
   values that are delegated to the specialized helpers above.  */
2922 /* We found the field with exact match. */
2923 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2925 return canonicalize_constructor_val (ctor);
2927 /* We are at the end of walk, see if we can view convert the
2929 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2930 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2931 && operand_equal_p (TYPE_SIZE (type),
2932 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2934 ret = canonicalize_constructor_val (ctor);
2935 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2940 if (TREE_CODE (ctor) == STRING_CST)
2941 return fold_string_cst_ctor_reference (type, ctor, offset, size);
/* Dispatch to the array or record/union/vector walker.  */
2942 if (TREE_CODE (ctor) == CONSTRUCTOR)
2945 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2946 return fold_array_ctor_reference (type, ctor, offset, size);
2948 return fold_nonarray_ctor_reference (type, ctor, offset, size);
2954 /* Return the tree representing the element referenced by T if T is an
2955 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
2956 names using VALUEIZE. Return NULL_TREE otherwise. */
2959 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
/* Fold the reference T (ARRAY_REF, COMPONENT_REF, MEM_REF, ...) into
   a constant read from its base object's initializer, valueizing SSA
   names with VALUEIZE.  Returns NULL_TREE when nothing can be proven.
   NOTE(review): interior lines of this function are missing from this
   chunk (embedded line numbers jump); comments cover visible code only.  */
2961 tree ctor, idx, base;
2962 HOST_WIDE_INT offset, size, max_size;
/* A bare declaration folds directly to its constant value.  */
2965 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2966 return get_symbol_constant_value (t);
2968 tem = fold_read_from_constant_string (t);
2972 switch (TREE_CODE (t))
2975 case ARRAY_RANGE_REF:
2976 /* Constant indexes are handled well by get_base_constructor.
2977 Only special case variable offsets.
2978 FIXME: This code can't handle nested references with variable indexes
2979 (they will be handled only by iteration of ccp). Perhaps we can bring
2980 get_ref_base_and_extent here and make it use a valueize callback. */
2981 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
2983 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
2984 && host_integerp (idx, 0)
2986 tree low_bound, unit_size;
2988 /* If the resulting bit-offset is constant, track it. */
2989 if ((low_bound = array_ref_low_bound (t),
2990 host_integerp (low_bound, 0))
2991 && (unit_size = array_ref_element_size (t),
2992 host_integerp (unit_size, 1)))
/* Bit offset = (idx - low_bound) * unit_size * BITS_PER_UNIT.  */
2994 offset = TREE_INT_CST_LOW (idx);
2995 offset -= TREE_INT_CST_LOW (low_bound);
2996 offset *= TREE_INT_CST_LOW (unit_size);
2997 offset *= BITS_PER_UNIT;
2999 base = TREE_OPERAND (t, 0);
3000 ctor = get_base_constructor (base, &offset, valueize);
3001 /* Empty constructor. Always fold to 0. */
3002 if (ctor == error_mark_node)
3003 return build_zero_cst (TREE_TYPE (t));
3004 /* Out of bound array access. Value is undefined,
3008 /* We can not determine ctor. */
3011 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3012 TREE_INT_CST_LOW (unit_size)
3020 case TARGET_MEM_REF:
/* Constant-offset references: compute base and extent directly.  */
3022 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3023 ctor = get_base_constructor (base, &offset, valueize);
3025 /* Empty constructor. Always fold to 0. */
3026 if (ctor == error_mark_node)
3027 return build_zero_cst (TREE_TYPE (t));
3028 /* We do not know precise address. */
3029 if (max_size == -1 || max_size != size)
3031 /* We can not determine ctor. */
3035 /* Out of bound array access. Value is undefined, but don't fold. */
3039 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
/* REALPART/IMAGPART of a folded COMPLEX_CST: refold the wrapper.  */
3044 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3045 if (c && TREE_CODE (c) == COMPLEX_CST)
3046 return fold_build1_loc (EXPR_LOCATION (t),
3047 TREE_CODE (t), TREE_TYPE (t), c);
3059 fold_const_aggregate_ref (tree t)
3061 return fold_const_aggregate_ref_1 (t, NULL);
3064 /* Return true iff VAL is a gimple expression that is known to be
3065 non-negative. Restricted to floating-point inputs. */
3068 gimple_val_nonnegative_real_p (tree val)
3072 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3074 /* Use existing logic for non-gimple trees. */
3075 if (tree_expr_nonnegative_p (val))
3078 if (TREE_CODE (val) != SSA_NAME)
3081 /* Currently we look only at the immediately defining statement
3082 to make this determination, since recursion on defining
3083 statements of operands can lead to quadratic behavior in the
3084 worst case. This is expected to catch almost all occurrences
3085 in practice. It would be possible to implement limited-depth
3086 recursion if important cases are lost. Alternatively, passes
3087 that need this information (such as the pow/powi lowering code
3088 in the cse_sincos pass) could be revised to provide it through
3089 dataflow propagation. */
3091 def_stmt = SSA_NAME_DEF_STMT (val);
3093 if (is_gimple_assign (def_stmt))
3097 /* See fold-const.c:tree_expr_nonnegative_p for additional
3098 cases that could be handled with recursion. */
3100 switch (gimple_assign_rhs_code (def_stmt))
3103 /* Always true for floating-point operands. */
3107 /* True if the two operands are identical (since we are
3108 restricted to floating-point inputs). */
3109 op0 = gimple_assign_rhs1 (def_stmt);
3110 op1 = gimple_assign_rhs2 (def_stmt);
3113 || operand_equal_p (op0, op1, 0))
3120 else if (is_gimple_call (def_stmt))
3122 tree fndecl = gimple_call_fndecl (def_stmt);
3124 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3128 switch (DECL_FUNCTION_CODE (fndecl))
3130 CASE_FLT_FN (BUILT_IN_ACOS):
3131 CASE_FLT_FN (BUILT_IN_ACOSH):
3132 CASE_FLT_FN (BUILT_IN_CABS):
3133 CASE_FLT_FN (BUILT_IN_COSH):
3134 CASE_FLT_FN (BUILT_IN_ERFC):
3135 CASE_FLT_FN (BUILT_IN_EXP):
3136 CASE_FLT_FN (BUILT_IN_EXP10):
3137 CASE_FLT_FN (BUILT_IN_EXP2):
3138 CASE_FLT_FN (BUILT_IN_FABS):
3139 CASE_FLT_FN (BUILT_IN_FDIM):
3140 CASE_FLT_FN (BUILT_IN_HYPOT):
3141 CASE_FLT_FN (BUILT_IN_POW10):
3144 CASE_FLT_FN (BUILT_IN_SQRT):
3145 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3146 nonnegative inputs. */
3147 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3152 CASE_FLT_FN (BUILT_IN_POWI):
3153 /* True if the second argument is an even integer. */
3154 arg1 = gimple_call_arg (def_stmt, 1);
3156 if (TREE_CODE (arg1) == INTEGER_CST
3157 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3162 CASE_FLT_FN (BUILT_IN_POW):
3163 /* True if the second argument is an even integer-valued
3165 arg1 = gimple_call_arg (def_stmt, 1);
3167 if (TREE_CODE (arg1) == REAL_CST)
3172 c = TREE_REAL_CST (arg1);
3173 n = real_to_integer (&c);
3177 REAL_VALUE_TYPE cint;
3178 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3179 if (real_identical (&c, &cint))