1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
33 #include "gimple-fold.h"
35 /* Return true when DECL can be referenced from current unit.
36 We can get declarations that are not possible to reference for
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to other compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
45 2) In WHOPR mode devirtualization might lead to reference
46 to method that was partitioned elsewhere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
50 3) COMDAT functions referred by external vtables that
51 we devirtualize only during final compilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
57 can_refer_decl_in_current_unit_p (tree decl)
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
/* Decls that are neither TREE_STATIC nor DECL_EXTERNAL are
   function-local automatics and need no symbol-table checks.  */
62 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
64 /* External flag is set, so we deal with C++ reference
65 to static object from other file. */
66 if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
67 && TREE_CODE (decl) == VAR_DECL)
69 /* Just be sure it is not big in frontend setting
70 flags incorrectly. Those variables should never
72 gcc_checking_assert (!(vnode = varpool_get_node (decl))
73 || !vnode->finalized);
76 /* When function is public, we always can introduce new reference.
77 Exception are the COMDAT functions where introducing a direct
78 reference imply need to include function body in the current unit. */
79 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
81 /* We are not at ltrans stage; so don't worry about WHOPR.
82 Also when still gimplifying all referred comdat functions will be
84 ??? as observed in PR20991 for already optimized out comdat virtual functions
85 we may not necessarily give up because the copy will be output elsewhere when
86 corresponding vtable is output. */
87 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
89 /* If we already output the function body, we are safe. */
90 if (TREE_ASM_WRITTEN (decl))
92 if (TREE_CODE (decl) == FUNCTION_DECL)
94 node = cgraph_get_node (decl);
95 /* Check that we still have function body and that we didn't take
96 the decision to eliminate offline copy of the function yet.
97 The second is important when devirtualization happens during final
98 compilation stage when making a new reference no longer makes callee
100 if (!node || !node->analyzed || node->global.inlined_to)
103 else if (TREE_CODE (decl) == VAR_DECL)
/* For variables, require a finalized varpool node, i.e. an
   initializer this unit can actually see.  */
105 vnode = varpool_get_node (decl);
106 if (!vnode || !vnode->finalized)
112 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
113 acceptable form for is_gimple_min_invariant. */
116 canonicalize_constructor_val (tree cval)
/* Try to turn &base p+ offset into an ADDR_EXPR of an array element,
   which is an acceptable gimple invariant form.  */
119 if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
121 tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
122 TREE_OPERAND (cval, 0),
123 TREE_OPERAND (cval, 1),
128 if (TREE_CODE (cval) == ADDR_EXPR)
130 tree base = get_base_address (TREE_OPERAND (cval, 0));
/* Addresses of statics/functions this unit may not reference must
   not survive into the canonicalized value (see
   can_refer_decl_in_current_unit_p above).  */
133 && (TREE_CODE (base) == VAR_DECL
134 || TREE_CODE (base) == FUNCTION_DECL)
135 && !can_refer_decl_in_current_unit_p (base))
137 if (cfun && base && TREE_CODE (base) == VAR_DECL)
138 add_referenced_var (base);
139 /* Fixup types in global initializers. */
140 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
141 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
146 /* If SYM is a constant variable with known value, return the value.
147 NULL_TREE is returned otherwise. */
150 get_symbol_constant_value (tree sym)
152 if (const_value_known_p (sym))
154 tree val = DECL_INITIAL (sym);
/* Canonicalize the initializer; only hand back values that are
   valid gimple invariants.  */
157 val = canonicalize_constructor_val (val);
158 if (val && is_gimple_min_invariant (val))
163 /* Variables declared 'const' without an initializer
164 have zero as the initializer if they may not be
165 overridden at link or run time. */
167 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
168 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
169 return build_zero_cst (TREE_TYPE (sym));
176 /* Return true if we may propagate the address expression ADDR into the
177 dereference DEREF and cancel them. */
180 may_propagate_address_into_dereference (tree addr, tree deref)
182 gcc_assert (TREE_CODE (deref) == MEM_REF
183 && TREE_CODE (addr) == ADDR_EXPR)
207 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
208 BASE is an array type. OFFSET is a byte displacement.
210 LOC is the location of the original expression. */
213 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
215 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
216 tree array_type, elt_type, elt_size;
219 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
220 measured in units of the size of elements type) from that ARRAY_REF).
221 We can't do anything if either is variable.
223 The case we handle here is *(&A[N]+O). */
224 if (TREE_CODE (base) == ARRAY_REF)
226 tree low_bound = array_ref_low_bound (base);
228 elt_offset = TREE_OPERAND (base, 1);
229 if (TREE_CODE (low_bound) != INTEGER_CST
230 || TREE_CODE (elt_offset) != INTEGER_CST)
/* Normalize the extra element offset to be zero-based.  */
233 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
234 base = TREE_OPERAND (base, 0);
237 /* Ignore stupid user tricks of indexing non-array variables. */
238 array_type = TREE_TYPE (base);
239 if (TREE_CODE (array_type) != ARRAY_TYPE)
241 elt_type = TREE_TYPE (array_type);
243 /* Use signed size type for intermediate computation on the index. */
244 idx_type = ssizetype;
246 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
247 element type (so we can use the alignment if it's not constant).
248 Otherwise, compute the offset as an index by using a division. If the
249 division isn't exact, then don't do anything. */
250 elt_size = TYPE_SIZE_UNIT (elt_type);
253 if (integer_zerop (offset))
255 if (TREE_CODE (elt_size) != INTEGER_CST)
256 elt_size = size_int (TYPE_ALIGN (elt_type));
258 idx = build_int_cst (idx_type, 0);
262 unsigned HOST_WIDE_INT lquo, lrem;
263 HOST_WIDE_INT hquo, hrem;
266 /* The final array offset should be signed, so we need
267 to sign-extend the (possibly pointer) offset here
268 and use signed division. */
269 soffset = double_int_sext (tree_to_double_int (offset),
270 TYPE_PRECISION (TREE_TYPE (offset)));
271 if (TREE_CODE (elt_size) != INTEGER_CST
272 || div_and_round_double (TRUNC_DIV_EXPR, 0,
273 soffset.low, soffset.high,
274 TREE_INT_CST_LOW (elt_size),
275 TREE_INT_CST_HIGH (elt_size),
276 &lquo, &hquo, &lrem, &hrem)
280 idx = build_int_cst_wide (idx_type, lquo, hquo);
283 /* Assume the low bound is zero. If there is a domain type, get the
284 low bound, if any, convert the index into that type, and add the
286 min_idx = build_int_cst (idx_type, 0);
287 domain_type = TYPE_DOMAIN (array_type);
290 idx_type = domain_type;
291 if (TYPE_MIN_VALUE (idx_type))
292 min_idx = TYPE_MIN_VALUE (idx_type);
294 min_idx = fold_convert (idx_type, min_idx);
296 if (TREE_CODE (min_idx) != INTEGER_CST)
299 elt_offset = fold_convert (idx_type, elt_offset);
302 if (!integer_zerop (min_idx))
303 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
304 if (!integer_zerop (elt_offset))
305 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
307 /* Make sure to possibly truncate late after offsetting. */
308 idx = fold_convert (idx_type, idx);
310 /* We don't want to construct access past array bounds. For example
313 should not be simplified into (*c)[14] or tree-vrp will
315 This is only an issue for multi-dimensional arrays. */
316 if (TREE_CODE (elt_type) == ARRAY_TYPE
/* Reject an index outside the domain's [min, max] bounds, or a
   negative index when no domain bounds are available.  */
319 if (TYPE_MAX_VALUE (domain_type)
320 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
321 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
323 else if (TYPE_MIN_VALUE (domain_type)
324 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
325 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
327 else if (compare_tree_int (idx, 0) < 0)
332 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
333 SET_EXPR_LOCATION (t, loc);
339 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
340 LOC is the location of original expression.
342 Before attempting the conversion strip off existing ADDR_EXPRs. */
345 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
/* Only an ADDR_EXPR base can be peeled to its underlying object.  */
351 if (TREE_CODE (base) != ADDR_EXPR)
354 base = TREE_OPERAND (base, 0);
355 if (types_compatible_p (orig_type, TREE_TYPE (base))
356 && integer_zerop (offset))
/* Otherwise try to build an ARRAY_REF and accept it only when its
   type is compatible with the type the caller expects.  */
359 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
360 if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
365 /* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
366 LOC is the location of the original expression. */
369 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
/* ADDR must literally be an address-of expression to fold.  */
375 if (TREE_CODE (addr) != ADDR_EXPR)
377 base = TREE_OPERAND (addr, 0);
378 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
/* Re-take the address of the folded element; the result must be a
   useless (no-op) conversion away from ORIG_TYPE to be usable.  */
381 ret = build_fold_addr_expr (ret);
382 if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
384 SET_EXPR_LOCATION (ret, loc);
391 /* A quaint feature extant in our address arithmetic is that there
392 can be hidden type changes here. The type of the result need
393 not be the same as the type of the input pointer.
395 What we're after here is an expression of the form
396 (T *)(&array + const)
397 where array is OP0, const is OP1, RES_TYPE is T and
398 the cast doesn't actually exist, but is implicit in the
399 type of the POINTER_PLUS_EXPR. We'd like to turn this into
401 which may be able to propagate further. */
404 maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
409 /* The first operand should be an ADDR_EXPR. */
410 if (TREE_CODE (op0) != ADDR_EXPR)
412 op0 = TREE_OPERAND (op0, 0);
414 /* It had better be a constant. */
415 if (TREE_CODE (op1) != INTEGER_CST)
417 /* Or op0 should now be A[0] and the non-constant offset defined
418 via a multiplication by the array element size. */
419 if (TREE_CODE (op0) == ARRAY_REF
420 /* As we will end up creating a variable index array access
421 in the outermost array dimension make sure there isn't
422 a more inner array that the index could overflow to. */
423 && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
424 && integer_zerop (TREE_OPERAND (op0, 1))
425 && TREE_CODE (op1) == SSA_NAME)
427 gimple offset_def = SSA_NAME_DEF_STMT (op1);
428 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
429 if (!host_integerp (elsz, 1)
430 || !is_gimple_assign (offset_def))
433 /* Do not build array references of something that we can't
434 see the true number of array dimensions for. */
435 if (!DECL_P (TREE_OPERAND (op0, 0))
436 && !handled_component_p (TREE_OPERAND (op0, 0)))
/* &A[0] p+ (i * eltsize) --> &A[i]; or, for byte-sized elements,
   &A[0] p+ i --> &A[i] when i is not itself a multiplication.  */
439 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
440 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
441 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
442 return build_fold_addr_expr
443 (build4 (ARRAY_REF, TREE_TYPE (op0),
444 TREE_OPERAND (op0, 0),
445 gimple_assign_rhs1 (offset_def),
446 TREE_OPERAND (op0, 2),
447 TREE_OPERAND (op0, 3)));
448 else if (integer_onep (elsz)
449 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
450 return build_fold_addr_expr
451 (build4 (ARRAY_REF, TREE_TYPE (op0),
452 TREE_OPERAND (op0, 0),
454 TREE_OPERAND (op0, 2),
455 TREE_OPERAND (op0, 3)));
/* Same transform when OP0 is the array itself rather than A[0];
   restricted to one-dimensional arrays.  */
457 else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
459 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
460 && TREE_CODE (op1) == SSA_NAME)
462 gimple offset_def = SSA_NAME_DEF_STMT (op1);
463 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
464 if (!host_integerp (elsz, 1)
465 || !is_gimple_assign (offset_def))
468 /* Do not build array references of something that we can't
469 see the true number of array dimensions for. */
471 && !handled_component_p (op0))
474 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
475 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
476 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
477 return build_fold_addr_expr
478 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
479 op0, gimple_assign_rhs1 (offset_def),
480 integer_zero_node, NULL_TREE));
481 else if (integer_onep (elsz)
482 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
483 return build_fold_addr_expr
484 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
486 integer_zero_node, NULL_TREE));
492 /* If the first operand is an ARRAY_REF, expand it so that we can fold
493 the offset into it. */
494 while (TREE_CODE (op0) == ARRAY_REF)
496 tree array_obj = TREE_OPERAND (op0, 0);
497 tree array_idx = TREE_OPERAND (op0, 1);
498 tree elt_type = TREE_TYPE (op0);
499 tree elt_size = TYPE_SIZE_UNIT (elt_type);
502 if (TREE_CODE (array_idx) != INTEGER_CST)
504 if (TREE_CODE (elt_size) != INTEGER_CST)
507 /* Un-bias the index by the min index of the array type. */
508 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
511 min_idx = TYPE_MIN_VALUE (min_idx);
514 if (TREE_CODE (min_idx) != INTEGER_CST)
517 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
518 if (!integer_zerop (min_idx))
519 array_idx = int_const_binop (MINUS_EXPR, array_idx,
524 /* Convert the index to a byte offset. */
525 array_idx = fold_convert (sizetype, array_idx);
526 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
528 /* Update the operands for the next round, or for folding. */
529 op1 = int_const_binop (PLUS_EXPR,
534 ptd_type = TREE_TYPE (res_type);
535 /* If we want a pointer to void, reconstruct the reference from the
536 array element type. A pointer to that can be trivially converted
537 to void *. This happens as we fold (void *)(ptr p+ off). */
538 if (VOID_TYPE_P (ptd_type)
539 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
540 ptd_type = TREE_TYPE (TREE_TYPE (op0));
542 /* At which point we can try some of the same things as for indirects. */
543 t = maybe_fold_offset_to_array_ref (loc, op0, op1);
546 t = build_fold_addr_expr (t);
547 if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
549 SET_EXPR_LOCATION (t, loc);
555 /* Subroutine of fold_stmt. We perform several simplifications of the
556 memory reference tree EXPR and make sure to re-gimplify them properly
557 after propagation of constant addresses. IS_LHS is true if the
558 reference is supposed to be an lvalue. */
561 maybe_fold_reference (tree expr, bool is_lhs)
/* First handle unary/ternary wrappers around constants directly.  */
566 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
567 || TREE_CODE (expr) == REALPART_EXPR
568 || TREE_CODE (expr) == IMAGPART_EXPR)
569 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
570 return fold_unary_loc (EXPR_LOCATION (expr),
573 TREE_OPERAND (expr, 0));
574 else if (TREE_CODE (expr) == BIT_FIELD_REF
575 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
576 return fold_ternary_loc (EXPR_LOCATION (expr),
579 TREE_OPERAND (expr, 0),
580 TREE_OPERAND (expr, 1),
581 TREE_OPERAND (expr, 2));
/* Walk down to the innermost (non-component) base reference.  */
583 while (handled_component_p (*t))
584 t = &TREE_OPERAND (*t, 0);
586 /* Canonicalize MEM_REFs invariant address operand. Do this first
587 to avoid feeding non-canonical MEM_REFs elsewhere. */
588 if (TREE_CODE (*t) == MEM_REF
589 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
591 bool volatile_p = TREE_THIS_VOLATILE (*t);
592 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
593 TREE_OPERAND (*t, 0),
594 TREE_OPERAND (*t, 1));
/* fold_binary can lose the volatile qualifier; restore it.  */
597 TREE_THIS_VOLATILE (tem) = volatile_p;
599 tem = maybe_fold_reference (expr, is_lhs);
607 && (result = fold_const_aggregate_ref (expr))
608 && is_gimple_min_invariant (result))
611 /* Fold back MEM_REFs to reference trees. */
612 if (TREE_CODE (*t) == MEM_REF
613 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
614 && integer_zerop (TREE_OPERAND (*t, 1))
615 && (TREE_THIS_VOLATILE (*t)
616 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
617 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
618 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
619 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
620 /* We have to look out here to not drop a required conversion
621 from the rhs to the lhs if is_lhs, but we don't have the
622 rhs here to verify that. Thus require strict type
624 && types_compatible_p (TREE_TYPE (*t),
625 TREE_TYPE (TREE_OPERAND
626 (TREE_OPERAND (*t, 0), 0))))
629 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
630 tem = maybe_fold_reference (expr, is_lhs);
635 else if (TREE_CODE (*t) == TARGET_MEM_REF)
637 tree tem = maybe_fold_tmr (*t);
641 tem = maybe_fold_reference (expr, is_lhs);
652 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
653 replacement rhs for the statement or NULL_TREE if no simplification
654 could be made. It is assumed that the operands have been previously
658 fold_gimple_assign (gimple_stmt_iterator *si)
660 gimple stmt = gsi_stmt (*si);
661 enum tree_code subcode = gimple_assign_rhs_code (stmt);
662 location_t loc = gimple_location (stmt);
664 tree result = NULL_TREE;
/* Dispatch on the shape of the RHS: single operand, unary, binary
   or ternary operation.  */
666 switch (get_gimple_rhs_class (subcode))
668 case GIMPLE_SINGLE_RHS:
670 tree rhs = gimple_assign_rhs1 (stmt);
672 /* Try to fold a conditional expression. */
673 if (TREE_CODE (rhs) == COND_EXPR)
675 tree op0 = COND_EXPR_COND (rhs);
678 location_t cond_loc = EXPR_LOCATION (rhs);
680 if (COMPARISON_CLASS_P (op0))
682 fold_defer_overflow_warnings ();
683 tem = fold_binary_loc (cond_loc,
684 TREE_CODE (op0), TREE_TYPE (op0),
685 TREE_OPERAND (op0, 0),
686 TREE_OPERAND (op0, 1));
687 /* This is actually a conditional expression, not a GIMPLE
688 conditional statement, however, the valid_gimple_rhs_p
689 test still applies. */
690 set = (tem && is_gimple_condexpr (tem)
691 && valid_gimple_rhs_p (tem));
692 fold_undefer_overflow_warnings (set, stmt, 0);
694 else if (is_gimple_min_invariant (op0))
703 result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
704 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
707 else if (REFERENCE_CLASS_P (rhs))
708 return maybe_fold_reference (rhs, false);
710 else if (TREE_CODE (rhs) == ADDR_EXPR)
712 tree ref = TREE_OPERAND (rhs, 0);
713 tree tem = maybe_fold_reference (ref, true);
/* &MEM[p, 0] folds back to p (with a possible cast).  */
715 && TREE_CODE (tem) == MEM_REF
716 && integer_zerop (TREE_OPERAND (tem, 1)))
717 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
719 result = fold_convert (TREE_TYPE (rhs),
720 build_fold_addr_expr_loc (loc, tem));
721 else if (TREE_CODE (ref) == MEM_REF
722 && integer_zerop (TREE_OPERAND (ref, 1)))
723 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
726 else if (TREE_CODE (rhs) == CONSTRUCTOR
727 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
728 && (CONSTRUCTOR_NELTS (rhs)
729 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
731 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
735 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
736 if (TREE_CODE (val) != INTEGER_CST
737 && TREE_CODE (val) != REAL_CST
738 && TREE_CODE (val) != FIXED_CST)
741 return build_vector_from_ctor (TREE_TYPE (rhs),
742 CONSTRUCTOR_ELTS (rhs));
745 else if (DECL_P (rhs))
746 return unshare_expr (get_symbol_constant_value (rhs));
748 /* If we couldn't fold the RHS, hand over to the generic
750 if (result == NULL_TREE)
753 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
754 that may have been added by fold, and "useless" type
755 conversions that might now be apparent due to propagation. */
756 STRIP_USELESS_TYPE_CONVERSION (result);
758 if (result != rhs && valid_gimple_rhs_p (result))
765 case GIMPLE_UNARY_RHS:
767 tree rhs = gimple_assign_rhs1 (stmt);
769 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
772 /* If the operation was a conversion do _not_ mark a
773 resulting constant with TREE_OVERFLOW if the original
774 constant was not. These conversions have implementation
775 defined behavior and retaining the TREE_OVERFLOW flag
776 here would confuse later passes such as VRP. */
777 if (CONVERT_EXPR_CODE_P (subcode)
778 && TREE_CODE (result) == INTEGER_CST
779 && TREE_CODE (rhs) == INTEGER_CST)
780 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
782 STRIP_USELESS_TYPE_CONVERSION (result);
783 if (valid_gimple_rhs_p (result))
/* Pointer-to-pointer conversions may still fold via an address
   transform even when fold_unary did nothing.  */
786 else if (CONVERT_EXPR_CODE_P (subcode)
787 && POINTER_TYPE_P (gimple_expr_type (stmt))
788 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
790 tree type = gimple_expr_type (stmt);
791 tree t = maybe_fold_offset_to_address (loc,
792 gimple_assign_rhs1 (stmt),
793 integer_zero_node, type);
800 case GIMPLE_BINARY_RHS:
801 /* Try to fold pointer addition. */
802 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
804 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
805 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
807 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
808 if (!useless_type_conversion_p
809 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
810 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
812 result = maybe_fold_stmt_addition (gimple_location (stmt),
814 gimple_assign_rhs1 (stmt),
815 gimple_assign_rhs2 (stmt));
819 result = fold_binary_loc (loc, subcode,
820 TREE_TYPE (gimple_assign_lhs (stmt)),
821 gimple_assign_rhs1 (stmt),
822 gimple_assign_rhs2 (stmt));
826 STRIP_USELESS_TYPE_CONVERSION (result);
827 if (valid_gimple_rhs_p (result))
830 /* Fold might have produced non-GIMPLE, so if we trust it blindly
831 we lose canonicalization opportunities. Do not go again
832 through fold here though, or the same non-GIMPLE will be
834 if (commutative_tree_code (subcode)
835 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
836 gimple_assign_rhs2 (stmt), false))
837 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
838 gimple_assign_rhs2 (stmt),
839 gimple_assign_rhs1 (stmt));
843 case GIMPLE_TERNARY_RHS:
844 result = fold_ternary_loc (loc, subcode,
845 TREE_TYPE (gimple_assign_lhs (stmt)),
846 gimple_assign_rhs1 (stmt),
847 gimple_assign_rhs2 (stmt),
848 gimple_assign_rhs3 (stmt));
852 STRIP_USELESS_TYPE_CONVERSION (result);
853 if (valid_gimple_rhs_p (result))
856 /* Fold might have produced non-GIMPLE, so if we trust it blindly
857 we lose canonicalization opportunities. Do not go again
858 through fold here though, or the same non-GIMPLE will be
860 if (commutative_ternary_tree_code (subcode)
861 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
862 gimple_assign_rhs2 (stmt), false))
863 return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
864 gimple_assign_rhs2 (stmt),
865 gimple_assign_rhs1 (stmt),
866 gimple_assign_rhs3 (stmt));
870 case GIMPLE_INVALID_RHS:
877 /* Attempt to fold a conditional statement. Return true if any changes were
878 made. We only attempt to fold the condition expression, and do not perform
879 any transformation that would require alteration of the cfg. It is
880 assumed that the operands have been previously folded. */
883 fold_gimple_cond (gimple stmt)
885 tree result = fold_binary_loc (gimple_location (stmt),
886 gimple_cond_code (stmt),
888 gimple_cond_lhs (stmt),
889 gimple_cond_rhs (stmt));
/* Only install the folded condition when it is still a valid
   GIMPLE condition expression.  */
893 STRIP_USELESS_TYPE_CONVERSION (result);
894 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
896 gimple_cond_set_condition_from_tree (stmt, result);
904 /* Convert EXPR into a GIMPLE value suitable for substitution on the
905 RHS of an assignment. Insert the necessary statements before
906 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
907 is replaced. If the call is expected to produce a result, then it
908 is replaced by an assignment of the new RHS to the result variable.
909 If the result is to be ignored, then the call is replaced by a
910 GIMPLE_NOP. A proper VDEF chain is retained by making the first
911 VUSE and the last VDEF of the whole sequence be the same as the replaced
912 statement and using new SSA names for stores in between. */
915 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
918 tree tmp = NULL_TREE; /* Silence warning. */
919 gimple stmt, new_stmt;
920 gimple_stmt_iterator i;
921 gimple_seq stmts = gimple_seq_alloc();
922 struct gimplify_ctx gctx;
924 gimple laststore = NULL;
927 stmt = gsi_stmt (*si_p);
929 gcc_assert (is_gimple_call (stmt));
931 lhs = gimple_call_lhs (stmt);
932 reaching_vuse = gimple_vuse (stmt);
934 push_gimplify_context (&gctx);
936 if (lhs == NULL_TREE)
938 gimplify_and_add (expr, &stmts);
939 /* We can end up with folding a memcpy of an empty class assignment
940 which gets optimized away by C++ gimplification. */
941 if (gimple_seq_empty_p (stmts))
943 pop_gimplify_context (NULL);
944 if (gimple_in_ssa_p (cfun))
946 unlink_stmt_vdef (stmt);
949 gsi_remove (si_p, true);
954 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
956 pop_gimplify_context (NULL);
958 if (gimple_has_location (stmt))
959 annotate_all_with_location (stmts, gimple_location (stmt));
961 /* The replacement can expose previously unreferenced variables. */
962 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
966 gsi_insert_before (si_p, last, GSI_NEW_STMT);
969 new_stmt = gsi_stmt (i);
970 if (gimple_in_ssa_p (cfun))
972 find_new_referenced_vars (new_stmt);
973 mark_symbols_for_renaming (new_stmt);
975 /* If the new statement has a VUSE, update it with exact SSA name we
976 know will reach this one. */
977 if (gimple_vuse (new_stmt))
979 /* If we've also seen a previous store create a new VDEF for
980 the latter one, and make that the new reaching VUSE. */
983 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
984 gimple_set_vdef (laststore, reaching_vuse);
985 update_stmt (laststore);
988 gimple_set_vuse (new_stmt, reaching_vuse);
989 gimple_set_modified (new_stmt, true);
/* Remember the last store so its VDEF can be wired up later.  */
991 if (gimple_assign_single_p (new_stmt)
992 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
994 laststore = new_stmt;
999 if (lhs == NULL_TREE)
1001 /* If we replace a call without LHS that has a VDEF and our new
1002 sequence ends with a store we must make that store have the same
1003 vdef in order not to break the sequencing. This can happen
1004 for instance when folding memcpy calls into assignments. */
1005 if (gimple_vdef (stmt) && laststore)
1007 gimple_set_vdef (laststore, gimple_vdef (stmt));
1008 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1009 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
1010 update_stmt (laststore);
1012 else if (gimple_in_ssa_p (cfun))
1014 unlink_stmt_vdef (stmt);
1015 release_defs (stmt);
1023 gsi_insert_before (si_p, last, GSI_NEW_STMT);
1026 if (laststore && is_gimple_reg (lhs))
1028 gimple_set_vdef (laststore, gimple_vdef (stmt));
1029 update_stmt (laststore);
1030 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1031 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
1036 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
1037 gimple_set_vdef (laststore, reaching_vuse);
1038 update_stmt (laststore);
/* Replace the original call with LHS = TMP, preserving virtual
   operands and location.  */
1041 new_stmt = gimple_build_assign (lhs, tmp);
1042 if (!is_gimple_reg (tmp))
1043 gimple_set_vuse (new_stmt, reaching_vuse);
1044 if (!is_gimple_reg (lhs))
1046 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1047 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1048 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
1050 else if (reaching_vuse == gimple_vuse (stmt))
1051 unlink_stmt_vdef (stmt);
1054 gimple_set_location (new_stmt, gimple_location (stmt));
1055 gsi_replace (si_p, new_stmt, false);
1058 /* Return the string length, maximum string length or maximum value of
1060 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1061 is not NULL and, for TYPE == 0, its value is not equal to the length
1062 we determine or if we are unable to determine the length or value,
1063 return false. VISITED is a bitmap of visited variables.
1064 TYPE is 0 if string length should be returned, 1 for maximum string
1065 length and 2 for maximum value ARG can have. */
1068 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
/* Non-SSA arguments: handle COND_EXPR and &array[0] forms directly,
   otherwise fall through to c_strlen-based evaluation below.  */
1073 if (TREE_CODE (arg) != SSA_NAME)
1075 if (TREE_CODE (arg) == COND_EXPR)
1076 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
1077 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
1078 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1079 else if (TREE_CODE (arg) == ADDR_EXPR
1080 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1081 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1083 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1084 if (TREE_CODE (aop0) == INDIRECT_REF
1085 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1086 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1087 length, visited, type);
1093 if (TREE_CODE (val) != INTEGER_CST
1094 || tree_int_cst_sgn (val) < 0)
1098 val = c_strlen (arg, 1);
/* Merge VAL with a previously recorded *LENGTH: either maximize
   (TYPE != 0) or require exact equality (TYPE == 0).  */
1106 if (TREE_CODE (*length) != INTEGER_CST
1107 || TREE_CODE (val) != INTEGER_CST)
1110 if (tree_int_cst_lt (*length, val))
1114 else if (simple_cst_equal (val, *length) != 1)
1122 /* If we were already here, break the infinite cycle. */
1123 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1127 def_stmt = SSA_NAME_DEF_STMT (var);
1129 switch (gimple_code (def_stmt))
1132 /* The RHS of the statement defining VAR must either have a
1133 constant length or come from another SSA_NAME with a constant
1135 if (gimple_assign_single_p (def_stmt)
1136 || gimple_assign_unary_nop_p (def_stmt))
1138 tree rhs = gimple_assign_rhs1 (def_stmt);
1139 return get_maxval_strlen (rhs, length, visited, type);
1145 /* All the arguments of the PHI node must have the same constant
1149 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1151 tree arg = gimple_phi_arg (def_stmt, i)->def;
1153 /* If this PHI has itself as an argument, we cannot
1154 determine the string length of this argument. However,
1155 if we can find a constant string length for the other
1156 PHI args then we can still be sure that this is a
1157 constant string length. So be optimistic and just
1158 continue with the next argument. */
1159 if (arg == gimple_phi_result (def_stmt))
1162 if (!get_maxval_strlen (arg, length, visited, type))
1174 /* Fold builtin call in statement STMT. Returns a simplified tree.
1175 We may return a non-constant expression, including another call
1176 to a different function and with different arguments, e.g.,
1177 substituting memcpy for strcpy when the string length is known.
1178 Note that some builtins expand into inline code that may not
1179 be valid in GIMPLE. Callers must take care. */
/* NOTE(review): this excerpt elides interior lines; declarations of
   callee, ignore, nargs, arg_idx, a, type and visited are not visible
   here — confirm against the full file before relying on them.  */
1182 gimple_fold_builtin (gimple stmt)
1184 tree result, val[3];
1190 location_t loc = gimple_location (stmt);
1192 gcc_assert (is_gimple_call (stmt));
/* A call with no LHS means its value is unused; folders may then
   simplify more aggressively (see fold_ignored_result below).  */
1194 ignore = (gimple_call_lhs (stmt) == NULL);
1196 /* First try the generic builtin folder. If that succeeds, return the
1198 result = fold_call_stmt (stmt, ignore);
1202 STRIP_NOPS (result);
1206 /* Ignore MD builtins. */
1207 callee = gimple_call_fndecl (stmt);
1208 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1211 /* If the builtin could not be folded, and it has no argument list,
1213 nargs = gimple_call_num_args (stmt);
1217 /* Limit the work only for builtins we know how to simplify. */
/* First switch: pick which call argument's maximum string length
   (arg_idx) we need; second switch below does the actual folding.  */
1218 switch (DECL_FUNCTION_CODE (callee))
1220 case BUILT_IN_STRLEN:
1221 case BUILT_IN_FPUTS:
1222 case BUILT_IN_FPUTS_UNLOCKED:
1226 case BUILT_IN_STRCPY:
1227 case BUILT_IN_STRNCPY:
1231 case BUILT_IN_MEMCPY_CHK:
1232 case BUILT_IN_MEMPCPY_CHK:
1233 case BUILT_IN_MEMMOVE_CHK:
1234 case BUILT_IN_MEMSET_CHK:
1235 case BUILT_IN_STRNCPY_CHK:
1239 case BUILT_IN_STRCPY_CHK:
1240 case BUILT_IN_STPCPY_CHK:
1244 case BUILT_IN_SNPRINTF_CHK:
1245 case BUILT_IN_VSNPRINTF_CHK:
1253 if (arg_idx >= nargs)
1256 /* Try to use the dataflow information gathered by the CCP process. */
/* The visited bitmap guards get_maxval_strlen against SSA def-use
   cycles (see its PHI handling).  */
1257 visited = BITMAP_ALLOC (NULL);
1258 bitmap_clear (visited);
1260 memset (val, 0, sizeof (val));
1261 a = gimple_call_arg (stmt, arg_idx);
1262 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
1263 val[arg_idx] = NULL_TREE;
1265 BITMAP_FREE (visited);
1268 switch (DECL_FUNCTION_CODE (callee))
1270 case BUILT_IN_STRLEN:
1271 if (val[0] && nargs == 1)
1274 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
1276 /* If the result is not a valid gimple value, or not a cast
1277 of a valid gimple value, then we cannot use the result. */
1278 if (is_gimple_val (new_val)
1279 || (CONVERT_EXPR_P (new_val)
1280 && is_gimple_val (TREE_OPERAND (new_val, 0))))
1285 case BUILT_IN_STRCPY:
1286 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
1287 result = fold_builtin_strcpy (loc, callee,
1288 gimple_call_arg (stmt, 0),
1289 gimple_call_arg (stmt, 1),
1293 case BUILT_IN_STRNCPY:
1294 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1295 result = fold_builtin_strncpy (loc, callee,
1296 gimple_call_arg (stmt, 0),
1297 gimple_call_arg (stmt, 1),
1298 gimple_call_arg (stmt, 2),
1302 case BUILT_IN_FPUTS:
1304 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1305 gimple_call_arg (stmt, 1),
1306 ignore, false, val[0]);
1309 case BUILT_IN_FPUTS_UNLOCKED:
1311 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1312 gimple_call_arg (stmt, 1),
1313 ignore, true, val[0]);
1316 case BUILT_IN_MEMCPY_CHK:
1317 case BUILT_IN_MEMPCPY_CHK:
1318 case BUILT_IN_MEMMOVE_CHK:
1319 case BUILT_IN_MEMSET_CHK:
1320 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1321 result = fold_builtin_memory_chk (loc, callee,
1322 gimple_call_arg (stmt, 0),
1323 gimple_call_arg (stmt, 1),
1324 gimple_call_arg (stmt, 2),
1325 gimple_call_arg (stmt, 3),
1327 DECL_FUNCTION_CODE (callee));
1330 case BUILT_IN_STRCPY_CHK:
1331 case BUILT_IN_STPCPY_CHK:
1332 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1333 result = fold_builtin_stxcpy_chk (loc, callee,
1334 gimple_call_arg (stmt, 0),
1335 gimple_call_arg (stmt, 1),
1336 gimple_call_arg (stmt, 2),
1338 DECL_FUNCTION_CODE (callee));
1341 case BUILT_IN_STRNCPY_CHK:
1342 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1343 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
1344 gimple_call_arg (stmt, 1),
1345 gimple_call_arg (stmt, 2),
1346 gimple_call_arg (stmt, 3),
1350 case BUILT_IN_SNPRINTF_CHK:
1351 case BUILT_IN_VSNPRINTF_CHK:
1352 if (val[1] && is_gimple_val (val[1]))
1353 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1354 DECL_FUNCTION_CODE (callee));
/* When the call's value is unused, strip side-effect-free parts of
   the folded expression.  */
1361 if (result && ignore)
1362 result = fold_ignored_result (result);
1366 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
1367 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
1368 KNOWN_BINFO carries the binfo describing the true type of
1369 OBJ_TYPE_REF_OBJECT(REF). If a call to the function must be accompanied
1370 with a this adjustment, the constant which should be added to this pointer
1371 is stored to *DELTA. If REFUSE_THUNKS is true, return NULL if the function
1372 is a thunk (other than a this adjustment which is dealt with by DELTA). */
/* NOTE(review): the function name contains a typo ("mehtod" for
   "method").  Renaming would change the symbol external callers use,
   so it must be fixed file-wide together with all call sites, not here.  */
1375 gimple_get_virt_mehtod_for_binfo (HOST_WIDE_INT token, tree known_binfo,
1376 tree *delta, bool refuse_thunks)
1380 struct cgraph_node *node;
1382 v = BINFO_VIRTUALS (known_binfo);
1383 /* If there are no virtual methods, leave the OBJ_TYPE_REF alone. */
/* Each vtable slot takes several entries when descriptors are in use;
   scale the index accordingly.  */
1389 i += (TARGET_VTABLE_USES_DESCRIPTORS
1390 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
1394 fndecl = TREE_VALUE (v);
1395 node = cgraph_get_node_or_alias (fndecl);
1398 /* Bail out if it is a thunk declaration. Since simple this_adjusting
1399 thunks are represented by a constant in TREE_PURPOSE of items in
1400 BINFO_VIRTUALS, this is a more complicated type which we cannot handle as
1403 FIXME: Remove the following condition once we are able to represent
1404 thunk information on call graph edges. */
1405 || (node->same_body_alias && node->thunk.thunk_p)))
1408 /* When cgraph node is missing and function is not public, we cannot
1409 devirtualize. This can happen in WHOPR when the actual method
1410 ends up in other partition, because we found devirtualization
1411 possibility too late. */
1412 if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v)))
/* TREE_PURPOSE of the vtable entry holds the this-adjustment constant.  */
1415 *delta = TREE_PURPOSE (v);
1416 gcc_checking_assert (host_integerp (*delta, 0));
1420 /* Generate code adjusting the first parameter of a call statement determined
/* Inserts, before the call at *GSI, a new SSA temporary
   tmp = this + DELTA (POINTER_PLUS_EXPR) and rewrites the call's first
   argument to use it.  The call must have at least one pointer argument.  */
1424 gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
1426 gimple call_stmt = gsi_stmt (*gsi);
/* POINTER_PLUS_EXPR requires a sizetype offset.  */
1430 delta = fold_convert (sizetype, delta);
1431 gcc_assert (gimple_call_num_args (call_stmt) >= 1);
1432 parm = gimple_call_arg (call_stmt, 0);
1433 gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
1434 tmp = create_tmp_var (TREE_TYPE (parm), NULL);
1435 add_referenced_var (tmp);
1437 tmp = make_ssa_name (tmp, NULL);
1438 new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
1439 SSA_NAME_DEF_STMT (tmp) = new_stmt;
1440 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1441 gimple_call_set_arg (call_stmt, 0, tmp);
1444 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1445 The statement may be replaced by another statement, e.g., if the call
1446 simplifies to a constant value. Return true if any changes were made.
1447 It is assumed that the operands have been previously folded. */
1450 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1452 gimple stmt = gsi_stmt (*gsi);
1455 /* Check for builtins that CCP can handle using information not
1456 available in the generic fold routines. */
/* Builtin folding may replace the whole statement, so it is only
   attempted when !inplace.  */
1457 callee = gimple_call_fndecl (stmt);
1458 if (!inplace && callee && DECL_BUILT_IN (callee))
1460 tree result = gimple_fold_builtin (stmt);
/* Prefer the cheap in-place update; fall back to re-gimplifying the
   folded tree when it is not a valid GIMPLE replacement.  */
1464 if (!update_call_from_tree (gsi, result))
1465 gimplify_and_update_call_from_tree (gsi, result);
1470 /* Check for virtual calls that became direct calls. */
1471 callee = gimple_call_fn (stmt);
1472 if (TREE_CODE (callee) == OBJ_TYPE_REF
1473 && gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1475 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1482 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1483 distinguishes both cases. */
/* Dispatches on the statement kind, folds the main computation, then
   folds *& references on the LHS.  Returns true iff anything changed.
   When INPLACE, the statement must not grow or be replaced.  */
1486 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1488 bool changed = false;
1489 gimple stmt = gsi_stmt (*gsi);
1492 /* Fold the main computation performed by the statement. */
1493 switch (gimple_code (stmt))
1497 unsigned old_num_ops = gimple_num_ops (stmt);
1498 tree new_rhs = fold_gimple_assign (gsi);
1499 tree lhs = gimple_assign_lhs (stmt);
1501 && !useless_type_conversion_p (TREE_TYPE (lhs),
1502 TREE_TYPE (new_rhs)))
1503 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
/* In-place folding must not need more operand slots than the
   statement already has.  */
1506 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1508 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1515 changed |= fold_gimple_cond (stmt);
1519 /* Fold *& in call arguments. */
1520 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1521 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1523 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1526 gimple_call_set_arg (stmt, i, tmp);
1530 changed |= gimple_fold_call (gsi, inplace);
1534 /* Fold *& in asm operands. */
/* Output operands are lvalues (is_lhs = true); inputs are rvalues.  */
1535 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1537 tree link = gimple_asm_output_op (stmt, i);
1538 tree op = TREE_VALUE (link);
1539 if (REFERENCE_CLASS_P (op)
1540 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1542 TREE_VALUE (link) = op;
1546 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1548 tree link = gimple_asm_input_op (stmt, i);
1549 tree op = TREE_VALUE (link);
1550 if (REFERENCE_CLASS_P (op)
1551 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
1553 TREE_VALUE (link) = op;
1560 if (gimple_debug_bind_p (stmt))
1562 tree val = gimple_debug_bind_get_value (stmt);
1564 && REFERENCE_CLASS_P (val))
1566 tree tem = maybe_fold_reference (val, false);
1569 gimple_debug_bind_set_value (stmt, tem);
/* Re-fetch: the switch above may have replaced the statement.  */
1579 stmt = gsi_stmt (*gsi);
1581 /* Fold *& on the lhs. */
1582 if (gimple_has_lhs (stmt))
1584 tree lhs = gimple_get_lhs (stmt);
1585 if (lhs && REFERENCE_CLASS_P (lhs))
1587 tree new_lhs = maybe_fold_reference (lhs, true);
1590 gimple_set_lhs (stmt, new_lhs);
1599 /* Fold the statement pointed to by GSI. In some cases, this function may
1600 replace the whole statement with a new one. Returns true iff folding
1602 The statement pointed to by GSI should be in valid gimple form but may
1603 be in unfolded state as resulting from for example constant propagation
1604 which can produce *&x = 0. */
/* Thin wrapper: full (non-inplace) folding via fold_stmt_1.  */
1607 fold_stmt (gimple_stmt_iterator *gsi)
1609 return fold_stmt_1 (gsi, false);
1612 /* Perform the minimal folding on statement STMT. Only operations like
1613 *&x created by constant propagation are handled. The statement cannot
1614 be replaced with a new one. Return true if the statement was
1615 changed, false otherwise.
1616 The statement STMT should be in valid gimple form but may
1617 be in unfolded state as resulting from for example constant propagation
1618 which can produce *&x = 0. */
1621 fold_stmt_inplace (gimple stmt)
1623 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1624 bool changed = fold_stmt_1 (&gsi, true);
/* In-place folding must never have replaced the statement object.  */
1625 gcc_assert (gsi_stmt (gsi) == stmt);
1629 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1630 if EXPR is null or we don't know how.
1631 If non-null, the result always has boolean type. */
1634 canonicalize_bool (tree expr, bool invert)
/* NOTE(review): the INVERT test itself is elided in this excerpt;
   judging by the returns below, the first group handles the inverted
   case (nonzero => false) and the second the plain case — confirm
   against the full file.  */
1640 if (integer_nonzerop (expr))
1641 return boolean_false_node;
1642 else if (integer_zerop (expr))
1643 return boolean_true_node;
/* Inverting an SSA name: express it as NAME == 0.  */
1644 else if (TREE_CODE (expr) == SSA_NAME)
1645 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1646 build_int_cst (TREE_TYPE (expr), 0))
1647 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1648 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1650 TREE_OPERAND (expr, 0),
1651 TREE_OPERAND (expr, 1));
1657 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1659 if (integer_nonzerop (expr))
1660 return boolean_true_node;
1661 else if (integer_zerop (expr))
1662 return boolean_false_node;
/* Non-inverted SSA name: canonical boolean form is NAME != 0.  */
1663 else if (TREE_CODE (expr) == SSA_NAME)
1664 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1665 build_int_cst (TREE_TYPE (expr), 0));
1666 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1667 return fold_build2 (TREE_CODE (expr),
1669 TREE_OPERAND (expr, 0),
1670 TREE_OPERAND (expr, 1));
1676 /* Check to see if a boolean expression EXPR is logically equivalent to the
1677 comparison (OP1 CODE OP2). Check for various identities involving
1681 same_bool_comparison_p (const_tree expr, enum tree_code code,
1682 const_tree op1, const_tree op2)
1686 /* The obvious case. */
1687 if (TREE_CODE (expr) == code
1688 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1689 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1692 /* Check for comparing (name, name != 0) and the case where expr
1693 is an SSA_NAME with a definition matching the comparison. */
1694 if (TREE_CODE (expr) == SSA_NAME
1695 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
/* A boolean NAME is equivalent to NAME != 0 and to NAME == 1.  */
1697 if (operand_equal_p (expr, op1, 0))
1698 return ((code == NE_EXPR && integer_zerop (op2))
1699 || (code == EQ_EXPR && integer_nonzerop (op2)));
1700 s = SSA_NAME_DEF_STMT (expr);
1701 if (is_gimple_assign (s)
1702 && gimple_assign_rhs_code (s) == code
1703 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1704 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1708 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1709 of name is a comparison, recurse. */
1710 if (TREE_CODE (op1) == SSA_NAME
1711 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1713 s = SSA_NAME_DEF_STMT (op1);
1714 if (is_gimple_assign (s)
1715 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1717 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) / (name == 1): compare EXPR against the comparison
   defining name as-is; (name == 0) / (name != 1): against its inverse.  */
1718 if ((c == NE_EXPR && integer_zerop (op2))
1719 || (c == EQ_EXPR && integer_nonzerop (op2)))
1720 return same_bool_comparison_p (expr, c,
1721 gimple_assign_rhs1 (s),
1722 gimple_assign_rhs2 (s));
1723 if ((c == EQ_EXPR && integer_zerop (op2))
1724 || (c == NE_EXPR && integer_nonzerop (op2)))
1725 return same_bool_comparison_p (expr,
1726 invert_tree_comparison (c, false),
1727 gimple_assign_rhs1 (s),
1728 gimple_assign_rhs2 (s));
1734 /* Check to see if two boolean expressions OP1 and OP2 are logically
1738 same_bool_result_p (const_tree op1, const_tree op2)
1740 /* Simple cases first. */
1741 if (operand_equal_p (op1, op2, 0))
1744 /* Check the cases where at least one of the operands is a comparison.
1745 These are a bit smarter than operand_equal_p in that they apply some
1746 identities on SSA_NAMEs. */
1747 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1748 && same_bool_comparison_p (op1, TREE_CODE (op2),
1749 TREE_OPERAND (op2, 0),
1750 TREE_OPERAND (op2, 1)))
/* Symmetric case: op1 is the comparison.  */
1752 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1753 && same_bool_comparison_p (op2, TREE_CODE (op1),
1754 TREE_OPERAND (op1, 0),
1755 TREE_OPERAND (op1, 1)))
1762 /* Forward declarations for some mutually recursive functions. */
/* The and_*/or_* helpers below simplify boolean combinations of
   comparisons and recurse into one another (also indirectly through
   maybe_fold_and_comparisons / maybe_fold_or_comparisons), hence the
   prototypes here.  */
1765 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1766 enum tree_code code2, tree op2a, tree op2b);
1768 and_var_with_comparison (tree var, bool invert,
1769 enum tree_code code2, tree op2a, tree op2b);
1771 and_var_with_comparison_1 (gimple stmt,
1772 enum tree_code code2, tree op2a, tree op2b);
1774 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1775 enum tree_code code2, tree op2a, tree op2b);
1777 or_var_with_comparison (tree var, bool invert,
1778 enum tree_code code2, tree op2a, tree op2b);
1780 or_var_with_comparison_1 (gimple stmt,
1781 enum tree_code code2, tree op2a, tree op2b);
1783 /* Helper function for and_comparisons_1: try to simplify the AND of the
1784 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1785 If INVERT is true, invert the value of the VAR before doing the AND.
1786 Return NULL_EXPR if we can't simplify this to a single expression. */
1789 and_var_with_comparison (tree var, bool invert,
1790 enum tree_code code2, tree op2a, tree op2b)
1793 gimple stmt = SSA_NAME_DEF_STMT (var);
1795 /* We can only deal with variables whose definitions are assignments. */
1796 if (!is_gimple_assign (stmt))
1799 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1800 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1801 Then we only have to consider the simpler non-inverted cases. */
1803 t = or_var_with_comparison_1 (stmt,
1804 invert_tree_comparison (code2, false),
1807 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool re-applies the DeMorgan inversion when INVERT.  */
1808 return canonicalize_bool (t, invert);
1811 /* Try to simplify the AND of the ssa variable defined by the assignment
1812 STMT with the comparison specified by (OP2A CODE2 OP2B).
1813 Return NULL_EXPR if we can't simplify this to a single expression. */
1816 and_var_with_comparison_1 (gimple stmt,
1817 enum tree_code code2, tree op2a, tree op2b)
1819 tree var = gimple_assign_lhs (stmt);
1820 tree true_test_var = NULL_TREE;
1821 tree false_test_var = NULL_TREE;
1822 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1824 /* Check for identities like (var AND (var == 0)) => false. */
1825 if (TREE_CODE (op2a) == SSA_NAME
1826 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* true_test_var: the comparison is "op2a is true";
   false_test_var: the comparison is "op2a is false".  */
1828 if ((code2 == NE_EXPR && integer_zerop (op2b))
1829 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1831 true_test_var = op2a;
1832 if (var == true_test_var)
1835 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1836 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1838 false_test_var = op2a;
1839 if (var == false_test_var)
1840 return boolean_false_node;
1844 /* If the definition is a comparison, recurse on it. */
1845 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1847 tree t = and_comparisons_1 (innercode,
1848 gimple_assign_rhs1 (stmt),
1849 gimple_assign_rhs2 (stmt),
1857 /* If the definition is an AND or OR expression, we may be able to
1858 simplify by reassociating. */
1859 if (innercode == TRUTH_AND_EXPR
1860 || innercode == TRUTH_OR_EXPR
1861 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1862 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
1864 tree inner1 = gimple_assign_rhs1 (stmt);
1865 tree inner2 = gimple_assign_rhs2 (stmt);
1868 tree partial = NULL_TREE;
1869 bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);
1871 /* Check for boolean identities that don't require recursive examination
1873 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1874 inner1 AND (inner1 OR inner2) => inner1
1875 !inner1 AND (inner1 AND inner2) => false
1876 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1877 Likewise for similar cases involving inner2. */
1878 if (inner1 == true_test_var)
1879 return (is_and ? var : inner1);
1880 else if (inner2 == true_test_var)
1881 return (is_and ? var : inner2);
1882 else if (inner1 == false_test_var)
1884 ? boolean_false_node
1885 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1886 else if (inner2 == false_test_var)
1888 ? boolean_false_node
1889 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1891 /* Next, redistribute/reassociate the AND across the inner tests.
1892 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1893 if (TREE_CODE (inner1) == SSA_NAME
1894 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1895 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1896 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1897 gimple_assign_rhs1 (s),
1898 gimple_assign_rhs2 (s),
1899 code2, op2a, op2b)))
1901 /* Handle the AND case, where we are reassociating:
1902 (inner1 AND inner2) AND (op2a code2 op2b)
1904 If the partial result t is a constant, we win. Otherwise
1905 continue on to try reassociating with the other inner test. */
1908 if (integer_onep (t))
1910 else if (integer_zerop (t))
1911 return boolean_false_node;
1914 /* Handle the OR case, where we are redistributing:
1915 (inner1 OR inner2) AND (op2a code2 op2b)
1916 => (t OR (inner2 AND (op2a code2 op2b))) */
1917 else if (integer_onep (t))
1918 return boolean_true_node;
1920 /* Save partial result for later. */
1924 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1925 if (TREE_CODE (inner2) == SSA_NAME
1926 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1927 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1928 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1929 gimple_assign_rhs1 (s),
1930 gimple_assign_rhs2 (s),
1931 code2, op2a, op2b)))
1933 /* Handle the AND case, where we are reassociating:
1934 (inner1 AND inner2) AND (op2a code2 op2b)
1935 => (inner1 AND t) */
1938 if (integer_onep (t))
1940 else if (integer_zerop (t))
1941 return boolean_false_node;
1942 /* If both are the same, we can apply the identity
1944 else if (partial && same_bool_result_p (t, partial))
1948 /* Handle the OR case. where we are redistributing:
1949 (inner1 OR inner2) AND (op2a code2 op2b)
1950 => (t OR (inner1 AND (op2a code2 op2b)))
1951 => (t OR partial) */
1954 if (integer_onep (t))
1955 return boolean_true_node;
1958 /* We already got a simplification for the other
1959 operand to the redistributed OR expression. The
1960 interesting case is when at least one is false.
1961 Or, if both are the same, we can apply the identity
1963 if (integer_zerop (partial))
1965 else if (integer_zerop (t))
1967 else if (same_bool_result_p (t, partial))
1976 /* Try to simplify the AND of two comparisons defined by
1977 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1978 If this can be done without constructing an intermediate value,
1979 return the resulting tree; otherwise NULL_TREE is returned.
1980 This function is deliberately asymmetric as it recurses on SSA_DEFs
1981 in the first comparison but not the second. */
1984 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1985 enum tree_code code2, tree op2a, tree op2b)
1987 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1988 if (operand_equal_p (op1a, op2a, 0)
1989 && operand_equal_p (op1b, op2b, 0))
1991 tree t = combine_comparisons (UNKNOWN_LOCATION,
1992 TRUTH_ANDIF_EXPR, code1, code2,
1993 boolean_type_node, op1a, op1b);
1998 /* Likewise the swapped case of the above. */
1999 if (operand_equal_p (op1a, op2b, 0)
2000 && operand_equal_p (op1b, op2a, 0))
2002 tree t = combine_comparisons (UNKNOWN_LOCATION,
2003 TRUTH_ANDIF_EXPR, code1,
2004 swap_tree_comparison (code2),
2005 boolean_type_node, op1a, op1b);
2010 /* If both comparisons are of the same value against constants, we might
2011 be able to merge them. */
2012 if (operand_equal_p (op1a, op2a, 0)
2013 && TREE_CODE (op1b) == INTEGER_CST
2014 && TREE_CODE (op2b) == INTEGER_CST)
/* cmp orders the two constant bounds: <0 if op1b < op2b, etc.  */
2016 int cmp = tree_int_cst_compare (op1b, op2b);
2018 /* If we have (op1a == op1b), we should either be able to
2019 return that or FALSE, depending on whether the constant op1b
2020 also satisfies the other comparison against op2b. */
2021 if (code1 == EQ_EXPR)
2027 case EQ_EXPR: val = (cmp == 0); break;
2028 case NE_EXPR: val = (cmp != 0); break;
2029 case LT_EXPR: val = (cmp < 0); break;
2030 case GT_EXPR: val = (cmp > 0); break;
2031 case LE_EXPR: val = (cmp <= 0); break;
2032 case GE_EXPR: val = (cmp >= 0); break;
2033 default: done = false;
2038 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2040 return boolean_false_node;
2043 /* Likewise if the second comparison is an == comparison. */
2044 else if (code2 == EQ_EXPR)
2050 case EQ_EXPR: val = (cmp == 0); break;
2051 case NE_EXPR: val = (cmp != 0); break;
/* Note the comparison senses flip here: we test whether op2b
   satisfies (op2b code1 op1b), so cmp's sign is reversed.  */
2052 case LT_EXPR: val = (cmp > 0); break;
2053 case GT_EXPR: val = (cmp < 0); break;
2054 case LE_EXPR: val = (cmp >= 0); break;
2055 case GE_EXPR: val = (cmp <= 0); break;
2056 default: done = false;
2061 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2063 return boolean_false_node;
2067 /* Same business with inequality tests. */
2068 else if (code1 == NE_EXPR)
2073 case EQ_EXPR: val = (cmp != 0); break;
2074 case NE_EXPR: val = (cmp == 0); break;
2075 case LT_EXPR: val = (cmp >= 0); break;
2076 case GT_EXPR: val = (cmp <= 0); break;
2077 case LE_EXPR: val = (cmp > 0); break;
2078 case GE_EXPR: val = (cmp < 0); break;
2083 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2085 else if (code2 == NE_EXPR)
2090 case EQ_EXPR: val = (cmp == 0); break;
2091 case NE_EXPR: val = (cmp != 0); break;
2092 case LT_EXPR: val = (cmp <= 0); break;
2093 case GT_EXPR: val = (cmp >= 0); break;
2094 case LE_EXPR: val = (cmp < 0); break;
2095 case GE_EXPR: val = (cmp > 0); break;
2100 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2103 /* Chose the more restrictive of two < or <= comparisons. */
2104 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2105 && (code2 == LT_EXPR || code2 == LE_EXPR))
2107 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2108 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2110 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2113 /* Likewise chose the more restrictive of two > or >= comparisons. */
2114 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2115 && (code2 == GT_EXPR || code2 == GE_EXPR))
2117 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2118 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2120 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2123 /* Check for singleton ranges. */
/* (x <= c) AND (x >= c) collapses to x == c.  */
2125 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2126 || (code1 == GE_EXPR && code2 == LE_EXPR)))
2127 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2129 /* Check for disjoint ranges. */
2131 && (code1 == LT_EXPR || code1 == LE_EXPR)
2132 && (code2 == GT_EXPR || code2 == GE_EXPR))
2133 return boolean_false_node;
2135 && (code1 == GT_EXPR || code1 == GE_EXPR)
2136 && (code2 == LT_EXPR || code2 == LE_EXPR))
2137 return boolean_false_node;
2140 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2141 NAME's definition is a truth value. See if there are any simplifications
2142 that can be done against the NAME's definition. */
2143 if (TREE_CODE (op1a) == SSA_NAME
2144 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2145 && (integer_zerop (op1b) || integer_onep (op1b)))
/* invert is true for (NAME == 0) / (NAME != 1), i.e. "NAME is false".  */
2147 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2148 || (code1 == NE_EXPR && integer_onep (op1b)));
2149 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2150 switch (gimple_code (stmt))
2153 /* Try to simplify by copy-propagating the definition. */
2154 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2157 /* If every argument to the PHI produces the same result when
2158 ANDed with the second comparison, we win.
2159 Do not do this unless the type is bool since we need a bool
2160 result here anyway. */
2161 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2163 tree result = NULL_TREE;
2165 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2167 tree arg = gimple_phi_arg_def (stmt, i);
2169 /* If this PHI has itself as an argument, ignore it.
2170 If all the other args produce the same result,
2172 if (arg == gimple_phi_result (stmt))
2174 else if (TREE_CODE (arg) == INTEGER_CST)
2176 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2179 result = boolean_false_node;
2180 else if (!integer_zerop (result))
2184 result = fold_build2 (code2, boolean_type_node,
2186 else if (!same_bool_comparison_p (result,
2190 else if (TREE_CODE (arg) == SSA_NAME)
2192 tree temp = and_var_with_comparison (arg, invert,
2198 else if (!same_bool_result_p (result, temp))
2214 /* Try to simplify the AND of two comparisons, specified by
2215 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2216 If this can be simplified to a single expression (without requiring
2217 introducing more SSA variables to hold intermediate values),
2218 return the resulting tree. Otherwise return NULL_TREE.
2219 If the result expression is non-null, it has boolean type. */
2222 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2223 enum tree_code code2, tree op2a, tree op2b)
/* and_comparisons_1 only recurses into the first comparison's SSA defs,
   so try both argument orders.  */
2225 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2229 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2232 /* Helper function for or_comparisons_1: try to simplify the OR of the
2233 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2234 If INVERT is true, invert the value of VAR before doing the OR.
2235 Return NULL_EXPR if we can't simplify this to a single expression. */
2238 or_var_with_comparison (tree var, bool invert,
2239 enum tree_code code2, tree op2a, tree op2b)
2242 gimple stmt = SSA_NAME_DEF_STMT (var);
2244 /* We can only deal with variables whose definitions are assignments. */
2245 if (!is_gimple_assign (stmt))
2248 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2249 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2250 Then we only have to consider the simpler non-inverted cases. */
2252 t = and_var_with_comparison_1 (stmt,
2253 invert_tree_comparison (code2, false),
2256 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool re-applies the DeMorgan inversion when INVERT.  */
2257 return canonicalize_bool (t, invert);
2260 /* Try to simplify the OR of the ssa variable defined by the assignment
2261 STMT with the comparison specified by (OP2A CODE2 OP2B).
2262 Return NULL_EXPR if we can't simplify this to a single expression. */
2265 or_var_with_comparison_1 (gimple stmt,
2266 enum tree_code code2, tree op2a, tree op2b)
2268 tree var = gimple_assign_lhs (stmt);
2269 tree true_test_var = NULL_TREE;
2270 tree false_test_var = NULL_TREE;
2271 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2273 /* Check for identities like (var OR (var != 0)) => true . */
2274 if (TREE_CODE (op2a) == SSA_NAME
2275 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* true_test_var: the comparison is "op2a is true";
   false_test_var: the comparison is "op2a is false".  */
2277 if ((code2 == NE_EXPR && integer_zerop (op2b))
2278 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2280 true_test_var = op2a;
2281 if (var == true_test_var)
2284 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2285 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2287 false_test_var = op2a;
2288 if (var == false_test_var)
2289 return boolean_true_node;
2293 /* If the definition is a comparison, recurse on it. */
2294 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2296 tree t = or_comparisons_1 (innercode,
2297 gimple_assign_rhs1 (stmt),
2298 gimple_assign_rhs2 (stmt),
2306 /* If the definition is an AND or OR expression, we may be able to
2307 simplify by reassociating. */
2308 if (innercode == TRUTH_AND_EXPR
2309 || innercode == TRUTH_OR_EXPR
2310 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2311 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2313 tree inner1 = gimple_assign_rhs1 (stmt);
2314 tree inner2 = gimple_assign_rhs2 (stmt);
2317 tree partial = NULL_TREE;
2318 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2320 /* Check for boolean identities that don't require recursive examination
2322 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2323 inner1 OR (inner1 AND inner2) => inner1
2324 !inner1 OR (inner1 OR inner2) => true
2325 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2327 if (inner1 == true_test_var)
2328 return (is_or ? var : inner1);
2329 else if (inner2 == true_test_var)
2330 return (is_or ? var : inner2);
2331 else if (inner1 == false_test_var)
2334 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2335 else if (inner2 == false_test_var)
2338 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2340 /* Next, redistribute/reassociate the OR across the inner tests.
2341 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2342 if (TREE_CODE (inner1) == SSA_NAME
2343 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2344 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2345 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2346 gimple_assign_rhs1 (s),
2347 gimple_assign_rhs2 (s),
2348 code2, op2a, op2b)))
2350 /* Handle the OR case, where we are reassociating:
2351 (inner1 OR inner2) OR (op2a code2 op2b)
2353 If the partial result t is a constant, we win. Otherwise
2354 continue on to try reassociating with the other inner test. */
2357 if (integer_onep (t))
2358 return boolean_true_node;
2359 else if (integer_zerop (t))
2363 /* Handle the AND case, where we are redistributing:
2364 (inner1 AND inner2) OR (op2a code2 op2b)
2365 => (t AND (inner2 OR (op2a code op2b))) */
2366 else if (integer_zerop (t))
2367 return boolean_false_node;
2369 /* Save partial result for later. */
2373 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2374 if (TREE_CODE (inner2) == SSA_NAME
2375 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2376 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2377 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2378 gimple_assign_rhs1 (s),
2379 gimple_assign_rhs2 (s),
2380 code2, op2a, op2b)))
2382 /* Handle the OR case, where we are reassociating:
2383 (inner1 OR inner2) OR (op2a code2 op2b)
2385 => (t OR partial) */
2388 if (integer_zerop (t))
2390 else if (integer_onep (t))
2391 return boolean_true_node;
2392 /* If both are the same, we can apply the identity
2394 else if (partial && same_bool_result_p (t, partial))
2398 /* Handle the AND case, where we are redistributing:
2399 (inner1 AND inner2) OR (op2a code2 op2b)
2400 => (t AND (inner1 OR (op2a code2 op2b)))
2401 => (t AND partial) */
2404 if (integer_zerop (t))
2405 return boolean_false_node;
2408 /* We already got a simplification for the other
2409 operand to the redistributed AND expression. The
2410 interesting case is when at least one is true.
2411 Or, if both are the same, we can apply the identity
2413 if (integer_onep (partial))
2415 else if (integer_onep (t))
2417 else if (same_bool_result_p (t, partial))
/* NOTE(review): this chunk is a damaged numbered listing -- the original
   source line numbers are fused into the text and many lines (braces,
   declarations such as `bool done`/`bool val`, returns) were dropped during
   extraction.  Code below is kept byte-for-byte; restore from a pristine
   gimple-fold.c before attempting to compile or modify.  */
2426 /* Try to simplify the OR of two comparisons defined by
2427 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2428 If this can be done without constructing an intermediate value,
2429 return the resulting tree; otherwise NULL_TREE is returned.
2430 This function is deliberately asymmetric as it recurses on SSA_DEFs
2431 in the first comparison but not the second. */
2434 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2435 enum tree_code code2, tree op2a, tree op2b)
2437 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2438 if (operand_equal_p (op1a, op2a, 0)
2439 && operand_equal_p (op1b, op2b, 0))
2441 tree t = combine_comparisons (UNKNOWN_LOCATION,
2442 TRUTH_ORIF_EXPR, code1, code2,
2443 boolean_type_node, op1a, op1b);
2448 /* Likewise the swapped case of the above. */
2449 if (operand_equal_p (op1a, op2b, 0)
2450 && operand_equal_p (op1b, op2a, 0))
2452 tree t = combine_comparisons (UNKNOWN_LOCATION,
2453 TRUTH_ORIF_EXPR, code1,
2454 swap_tree_comparison (code2),
2455 boolean_type_node, op1a, op1b);
2460 /* If both comparisons are of the same value against constants, we might
2461 be able to merge them. */
2462 if (operand_equal_p (op1a, op2a, 0)
2463 && TREE_CODE (op1b) == INTEGER_CST
2464 && TREE_CODE (op2b) == INTEGER_CST)
2466 int cmp = tree_int_cst_compare (op1b, op2b);
2468 /* If we have (op1a != op1b), we should either be able to
2469 return that or TRUE, depending on whether the constant op1b
2470 also satisfies the other comparison against op2b. */
2471 if (code1 == NE_EXPR)
2477 case EQ_EXPR: val = (cmp == 0); break;
2478 case NE_EXPR: val = (cmp != 0); break;
2479 case LT_EXPR: val = (cmp < 0); break;
2480 case GT_EXPR: val = (cmp > 0); break;
2481 case LE_EXPR: val = (cmp <= 0); break;
2482 case GE_EXPR: val = (cmp >= 0); break;
2483 default: done = false;
2488 return boolean_true_node;
2490 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2493 /* Likewise if the second comparison is a != comparison. */
2494 else if (code2 == NE_EXPR)
/* Note the comparison senses below are mirrored (cmp > 0 for LT_EXPR etc.)
   because cmp was computed as op1b vs. op2b but the constant under test
   here is op2b.  */
2500 case EQ_EXPR: val = (cmp == 0); break;
2501 case NE_EXPR: val = (cmp != 0); break;
2502 case LT_EXPR: val = (cmp > 0); break;
2503 case GT_EXPR: val = (cmp < 0); break;
2504 case LE_EXPR: val = (cmp >= 0); break;
2505 case GE_EXPR: val = (cmp <= 0); break;
2506 default: done = false;
2511 return boolean_true_node;
2513 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2517 /* See if an equality test is redundant with the other comparison. */
2518 else if (code1 == EQ_EXPR)
2523 case EQ_EXPR: val = (cmp == 0); break;
2524 case NE_EXPR: val = (cmp != 0); break;
2525 case LT_EXPR: val = (cmp < 0); break;
2526 case GT_EXPR: val = (cmp > 0); break;
2527 case LE_EXPR: val = (cmp <= 0); break;
2528 case GE_EXPR: val = (cmp >= 0); break;
2533 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2535 else if (code2 == EQ_EXPR)
2540 case EQ_EXPR: val = (cmp == 0); break;
2541 case NE_EXPR: val = (cmp != 0); break;
2542 case LT_EXPR: val = (cmp > 0); break;
2543 case GT_EXPR: val = (cmp < 0); break;
2544 case LE_EXPR: val = (cmp >= 0); break;
2545 case GE_EXPR: val = (cmp <= 0); break;
2550 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2553 /* Chose the less restrictive of two < or <= comparisons. */
2554 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2555 && (code2 == LT_EXPR || code2 == LE_EXPR))
2557 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2558 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2560 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2563 /* Likewise chose the less restrictive of two > or >= comparisons. */
2564 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2565 && (code2 == GT_EXPR || code2 == GE_EXPR))
2567 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2568 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2570 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2573 /* Check for singleton ranges. */
2575 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2576 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2577 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2579 /* Check for less/greater pairs that don't restrict the range at all. */
2581 && (code1 == LT_EXPR || code1 == LE_EXPR)
2582 && (code2 == GT_EXPR || code2 == GE_EXPR))
2583 return boolean_true_node;
2585 && (code1 == GT_EXPR || code1 == GE_EXPR)
2586 && (code2 == LT_EXPR || code2 == LE_EXPR))
2587 return boolean_true_node;
2590 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2591 NAME's definition is a truth value. See if there are any simplifications
2592 that can be done against the NAME's definition. */
2593 if (TREE_CODE (op1a) == SSA_NAME
2594 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2595 && (integer_zerop (op1b) || integer_onep (op1b)))
2597 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2598 || (code1 == NE_EXPR && integer_onep (op1b)));
2599 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2600 switch (gimple_code (stmt))
2603 /* Try to simplify by copy-propagating the definition. */
2604 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2607 /* If every argument to the PHI produces the same result when
2608 ORed with the second comparison, we win.
2609 Do not do this unless the type is bool since we need a bool
2610 result here anyway. */
2611 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2613 tree result = NULL_TREE;
2615 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2617 tree arg = gimple_phi_arg_def (stmt, i);
2619 /* If this PHI has itself as an argument, ignore it.
2620 If all the other args produce the same result,
2622 if (arg == gimple_phi_result (stmt))
2624 else if (TREE_CODE (arg) == INTEGER_CST)
2626 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2629 result = boolean_true_node;
2630 else if (!integer_onep (result))
2634 result = fold_build2 (code2, boolean_type_node,
2636 else if (!same_bool_comparison_p (result,
2640 else if (TREE_CODE (arg) == SSA_NAME)
2642 tree temp = or_var_with_comparison (arg, invert,
2648 else if (!same_bool_result_p (result, temp))
2664 /* Try to simplify the OR of two comparisons, specified by
2665 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2666 If this can be simplified to a single expression (without requiring
2667 introducing more SSA variables to hold intermediate values),
2668 return the resulting tree. Otherwise return NULL_TREE.
2669 If the result expression is non-null, it has boolean type. */
2672 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2673 enum tree_code code2, tree op2a, tree op2b)
2675 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2679 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
/* NOTE(review): damaged numbered listing -- original line numbers are fused
   into the text and many lines are missing (case labels such as GIMPLE_ASSIGN
   and GIMPLE_CALL, closing braces, several returns and declarations).  Code
   below is kept byte-for-byte; restore from a pristine gimple-fold.c before
   compiling.  The visible structure dispatches on the statement's RHS class
   (single / unary / binary / ternary) and valueizes each operand through
   the VALUEIZE callback before folding.  */
2683 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2685 Either NULL_TREE, a simplified but non-constant or a constant
2688 ??? This should go into a gimple-fold-inline.h file to be eventually
2689 privatized with the single valueize function used in the various TUs
2690 to avoid the indirect function call overhead. */
2693 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2695 location_t loc = gimple_location (stmt);
2696 switch (gimple_code (stmt))
2700 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2702 switch (get_gimple_rhs_class (subcode))
2704 case GIMPLE_SINGLE_RHS:
2706 tree rhs = gimple_assign_rhs1 (stmt);
2707 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2709 if (TREE_CODE (rhs) == SSA_NAME)
2711 /* If the RHS is an SSA_NAME, return its known constant value,
2713 return (*valueize) (rhs);
2715 /* Handle propagating invariant addresses into address
2717 else if (TREE_CODE (rhs) == ADDR_EXPR
2718 && !is_gimple_min_invariant (rhs))
2720 HOST_WIDE_INT offset;
2722 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2726 && (CONSTANT_CLASS_P (base)
2727 || decl_address_invariant_p (base)))
2728 return build_invariant_address (TREE_TYPE (rhs),
2731 else if (TREE_CODE (rhs) == CONSTRUCTOR
2732 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2733 && (CONSTRUCTOR_NELTS (rhs)
2734 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
/* Fold a vector CONSTRUCTOR only when every valueized element is a
   constant (integer/real/fixed); otherwise it falls through.  */
2740 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2742 val = (*valueize) (val);
2743 if (TREE_CODE (val) == INTEGER_CST
2744 || TREE_CODE (val) == REAL_CST
2745 || TREE_CODE (val) == FIXED_CST)
2746 list = tree_cons (NULL_TREE, val, list);
2751 return build_vector (TREE_TYPE (rhs), nreverse (list));
2754 if (kind == tcc_reference)
2756 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2757 || TREE_CODE (rhs) == REALPART_EXPR
2758 || TREE_CODE (rhs) == IMAGPART_EXPR)
2759 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2761 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2762 return fold_unary_loc (EXPR_LOCATION (rhs),
2764 TREE_TYPE (rhs), val);
2766 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2767 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2769 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2770 return fold_ternary_loc (EXPR_LOCATION (rhs),
2772 TREE_TYPE (rhs), val,
2773 TREE_OPERAND (rhs, 1),
2774 TREE_OPERAND (rhs, 2));
2776 else if (TREE_CODE (rhs) == MEM_REF
2777 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2779 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2780 if (TREE_CODE (val) == ADDR_EXPR
2781 && is_gimple_min_invariant (val))
2783 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2785 TREE_OPERAND (rhs, 1));
2790 return fold_const_aggregate_ref_1 (rhs, valueize);
2792 else if (kind == tcc_declaration)
2793 return get_symbol_constant_value (rhs);
2797 case GIMPLE_UNARY_RHS:
2799 /* Handle unary operators that can appear in GIMPLE form.
2800 Note that we know the single operand must be a constant,
2801 so this should almost always return a simplified RHS. */
2802 tree lhs = gimple_assign_lhs (stmt);
2803 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2805 /* Conversions are useless for CCP purposes if they are
2806 value-preserving. Thus the restrictions that
2807 useless_type_conversion_p places for pointer type conversions
2808 do not apply here. Substitution later will only substitute to
2810 if (CONVERT_EXPR_CODE_P (subcode)
2811 && POINTER_TYPE_P (TREE_TYPE (lhs))
2812 && POINTER_TYPE_P (TREE_TYPE (op0)))
2815 /* Try to re-construct array references on-the-fly. */
2816 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2818 && ((tem = maybe_fold_offset_to_address
2820 op0, integer_zero_node, TREE_TYPE (lhs)))
2827 fold_unary_ignore_overflow_loc (loc, subcode,
2828 gimple_expr_type (stmt), op0);
2831 case GIMPLE_BINARY_RHS:
2833 /* Handle binary operators that can appear in GIMPLE form. */
2834 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2835 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2837 /* Translate &x + CST into an invariant form suitable for
2838 further propagation. */
2839 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2840 && TREE_CODE (op0) == ADDR_EXPR
2841 && TREE_CODE (op1) == INTEGER_CST)
2843 tree off = fold_convert (ptr_type_node, op1);
2844 return build_fold_addr_expr
2845 (fold_build2 (MEM_REF,
2846 TREE_TYPE (TREE_TYPE (op0)),
2847 unshare_expr (op0), off));
2850 return fold_binary_loc (loc, subcode,
2851 gimple_expr_type (stmt), op0, op1);
2854 case GIMPLE_TERNARY_RHS:
2856 /* Handle ternary operators that can appear in GIMPLE form. */
2857 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2858 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2859 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2861 return fold_ternary_loc (loc, subcode,
2862 gimple_expr_type (stmt), op0, op1, op2);
/* Presumably the lines below belong to a GIMPLE_CALL case whose label was
   lost in extraction: fold calls to built-ins with valueized arguments.
   TODO: confirm against pristine source.  */
2872 tree fn = (*valueize) (gimple_call_fn (stmt));
2873 if (TREE_CODE (fn) == ADDR_EXPR
2874 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2875 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2877 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2880 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2881 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2882 call = build_call_array_loc (loc,
2883 gimple_call_return_type (stmt),
2884 fn, gimple_call_num_args (stmt), args);
2885 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2887 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2888 STRIP_NOPS (retval);
2899 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2900 Returns NULL_TREE if folding to a constant is not possible, otherwise
2901 returns a constant according to is_gimple_min_invariant. */
2904 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2906 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2907 if (res && is_gimple_min_invariant (res))
/* NOTE(review): damaged listing; line numbers are fused into the text.
   Forward declaration is needed because the per-kind helpers below
   (fold_array_ctor_reference, fold_nonarray_ctor_reference) recurse
   into fold_ctor_reference, which is defined after them.  */
2913 /* The following set of functions are supposed to fold references using
2914 their constant initializers. */
2916 static tree fold_ctor_reference (tree type, tree ctor,
2917 unsigned HOST_WIDE_INT offset,
2918 unsigned HOST_WIDE_INT size);
/* NOTE(review): damaged numbered listing -- line numbers fused into the
   text and several lines missing (returns, braces, remaining switch cases).
   Kept byte-for-byte; restore from a pristine gimple-fold.c.  */
2920 /* See if we can find constructor defining value of BASE.
2921 When we know the constructor with constant offset (such as
2922 base is array[40] and we do know constructor of array), then
2923 BIT_OFFSET is adjusted accordingly.
2925 As a special case, return error_mark_node when constructor
2926 is not explicitly available, but it is known to be zero
2927 such as 'static const int a;'. */
2929 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2930 tree (*valueize)(tree))
2932 HOST_WIDE_INT bit_offset2, size, max_size;
2933 if (TREE_CODE (base) == MEM_REF)
2935 if (!integer_zerop (TREE_OPERAND (base, 1)))
2937 if (!host_integerp (TREE_OPERAND (base, 1), 0))
/* Fold the MEM_REF's constant byte offset into *BIT_OFFSET, then strip
   the indirection by valueizing the SSA pointer to an ADDR_EXPR.  */
2939 *bit_offset += (mem_ref_offset (base).low
2944 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2945 base = valueize (TREE_OPERAND (base, 0));
2946 if (!base || TREE_CODE (base) != ADDR_EXPR)
2948 base = TREE_OPERAND (base, 0);
2951 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2952 DECL_INITIAL. If BASE is a nested reference into another
2953 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2954 the inner reference. */
2955 switch (TREE_CODE (base))
2958 if (!const_value_known_p (base))
/* A static/external decl with no DECL_INITIAL is known zero-initialized;
   error_mark_node is the sentinel for that case.  */
2963 if (!DECL_INITIAL (base)
2964 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2965 return error_mark_node;
2966 return DECL_INITIAL (base);
2970 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2971 if (max_size == -1 || size != max_size)
2973 *bit_offset += bit_offset2;
2974 return get_base_constructor (base, bit_offset, valueize);
/* NOTE(review): damaged numbered listing -- line numbers fused into the
   text, some lines missing (return type, braces, part of the trailing
   comment).  Kept byte-for-byte.  */
2985 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2986 to the memory at bit OFFSET.
2988 We do only simple job of folding byte accesses. */
2991 fold_string_cst_ctor_reference (tree type, tree ctor,
2992 unsigned HOST_WIDE_INT offset,
2993 unsigned HOST_WIDE_INT size)
/* Only fold single-byte, byte-aligned reads whose mode matches the
   string's element mode exactly.  */
2995 if (INTEGRAL_TYPE_P (type)
2996 && (TYPE_MODE (type)
2997 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2998 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
3000 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
3001 && size == BITS_PER_UNIT
3002 && !(offset % BITS_PER_UNIT))
3004 offset /= BITS_PER_UNIT;
3005 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
3006 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
3009 const char a[20]="hello";
3012 might lead to offset greater than string length. In this case we
3013 know value is either initialized to 0 or out of bounds. Return 0
3015 return build_zero_cst (type);
/* NOTE(review): damaged numbered listing -- line numbers fused into the
   text and several lines missing (return type, braces, `return NULL_TREE`
   bail-outs).  Kept byte-for-byte.  */
3020 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
3021 SIZE to the memory at bit OFFSET. */
3024 fold_array_ctor_reference (tree type, tree ctor,
3025 unsigned HOST_WIDE_INT offset,
3026 unsigned HOST_WIDE_INT size)
3028 unsigned HOST_WIDE_INT cnt;
3030 double_int low_bound, elt_size;
3031 double_int index, max_index;
3032 double_int access_index;
3033 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
3034 HOST_WIDE_INT inner_offset;
3036 /* Compute low bound and elt size. */
3037 if (domain_type && TYPE_MIN_VALUE (domain_type))
3039 /* Static constructors for variably sized objects makes no sense. */
3040 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
3041 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
3044 low_bound = double_int_zero;
3045 /* Static constructors for variably sized objects makes no sense. */
3046 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
3049 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
3052 /* We can handle only constantly sized accesses that are known to not
3053 be larger than size of array element. */
3054 if (!TYPE_SIZE_UNIT (type)
3055 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
3056 || double_int_cmp (elt_size,
3057 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
3060 /* Compute the array index we look for. */
3061 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
3062 elt_size, TRUNC_DIV_EXPR)
3063 access_index = double_int_add (access_index, low_bound);
3065 /* And offset within the access. */
3066 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
3068 /* See if the array field is large enough to span whole access. We do not
3069 care to fold accesses spanning multiple array indexes. */
3070 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
/* Seed INDEX one below LOW_BOUND so the first elt with a NULL index
   lands on LOW_BOUND after the increment below.  */
3073 index = double_int_sub (low_bound, double_int_one);
3074 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
3076 /* Array constructor might explicitly set index, or specify range
3077 or leave index NULL meaning that it is next index after previous
3081 if (TREE_CODE (cfield) == INTEGER_CST)
3082 max_index = index = tree_to_double_int (cfield);
3085 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
3086 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
3087 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
3091 max_index = index = double_int_add (index, double_int_one);
3093 /* Do we have match? */
3094 if (double_int_cmp (access_index, index, 1) >= 0
3095 && double_int_cmp (access_index, max_index, 1) <= 0)
3096 return fold_ctor_reference (type, cval, inner_offset, size);
3098 /* When memory is not explicitly mentioned in constructor,
3099 it is 0 (or out of range). */
3100 return build_zero_cst (type);
/* NOTE(review): damaged numbered listing -- line numbers fused into the
   text and several lines missing (return type, braces, part of the
   inner_offset computation and bail-out returns).  Kept byte-for-byte.  */
3103 /* CTOR is CONSTRUCTOR of an aggregate or vector.
3104 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
3107 fold_nonarray_ctor_reference (tree type, tree ctor,
3108 unsigned HOST_WIDE_INT offset,
3109 unsigned HOST_WIDE_INT size)
3111 unsigned HOST_WIDE_INT cnt;
3114 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
3117 tree byte_offset = DECL_FIELD_OFFSET (cfield);
3118 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
3119 tree field_size = DECL_SIZE (cfield);
3120 double_int bitoffset;
3121 double_int byte_offset_cst = tree_to_double_int (byte_offset);
3122 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
3123 double_int bitoffset_end;
3125 /* Variable sized objects in static constructors makes no sense,
3126 but field_size can be NULL for flexible array members. */
3127 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
3128 && TREE_CODE (byte_offset) == INTEGER_CST
3129 && (field_size != NULL_TREE
3130 ? TREE_CODE (field_size) == INTEGER_CST
3131 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
3133 /* Compute bit offset of the field. */
3134 bitoffset = double_int_add (tree_to_double_int (field_offset),
3135 double_int_mul (byte_offset_cst,
3136 bits_per_unit_cst));
3137 /* Compute bit offset where the field ends. */
3138 if (field_size != NULL_TREE)
3139 bitoffset_end = double_int_add (bitoffset,
3140 tree_to_double_int (field_size));
3142 bitoffset_end = double_int_zero;
3144 /* Is OFFSET in the range (BITOFFSET, BITOFFSET_END)? */
3145 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) >= 0
3146 && (field_size == NULL_TREE
3147 || double_int_cmp (uhwi_to_double_int (offset),
3148 bitoffset_end, 0) < 0))
3150 double_int access_end = double_int_add (uhwi_to_double_int (offset),
3151 uhwi_to_double_int (size));
3152 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
3154 /* We do have overlap. Now see if field is large enough to
3155 cover the access. Give up for accesses spanning multiple
3157 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
3159 return fold_ctor_reference (type, cval,
3160 double_int_to_uhwi (inner_offset), size);
3163 /* When memory is not explicitly mentioned in constructor, it is 0. */
3164 return build_zero_cst (type);
/* NOTE(review): damaged numbered listing -- line numbers fused into the
   text and several lines missing (return type, braces, `ret` declaration,
   final `return NULL_TREE`).  Kept byte-for-byte.  */
3167 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
3168 to the memory at bit OFFSET. */
3171 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
3172 unsigned HOST_WIDE_INT size)
3176 /* We found the field with exact match. */
3177 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
3179 return canonicalize_constructor_val (ctor)
3181 /* We are at the end of walk, see if we can view convert the
3183 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
3184 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3185 && operand_equal_p (TYPE_SIZE (type),
3186 TYPE_SIZE (TREE_TYPE (ctor)), 0))
3188 ret = canonicalize_constructor_val (ctor);
3189 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
/* Dispatch on the kind of initializer: STRING_CST vs. CONSTRUCTOR,
   and array vs. record/union/vector constructors.  */
3194 if (TREE_CODE (ctor) == STRING_CST)
3195 return fold_string_cst_ctor_reference (type, ctor, offset, size);
3196 if (TREE_CODE (ctor) == CONSTRUCTOR)
3199 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
3200 return fold_array_ctor_reference (type, ctor, offset, size);
3202 return fold_nonarray_ctor_reference (type, ctor, offset, size);
/* NOTE(review): damaged numbered listing -- line numbers fused into the
   text and many lines missing (return type, braces, several case labels
   such as ARRAY_REF / MEM_REF / REALPART_EXPR, bail-out returns, and the
   `tem` declaration).  Kept byte-for-byte; restore from a pristine
   gimple-fold.c before compiling.  */
3208 /* Return the tree representing the element referenced by T if T is an
3209 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
3210 names using VALUEIZE. Return NULL_TREE otherwise. */
3213 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
3215 tree ctor, idx, base;
3216 HOST_WIDE_INT offset, size, max_size;
3219 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3220 return get_symbol_constant_value (t);
3222 tem = fold_read_from_constant_string (t);
3226 switch (TREE_CODE (t))
3229 case ARRAY_RANGE_REF:
3230 /* Constant indexes are handled well by get_base_constructor.
3231 Only special case variable offsets.
3232 FIXME: This code can't handle nested references with variable indexes
3233 (they will be handled only by iteration of ccp). Perhaps we can bring
3234 get_ref_base_and_extent here and make it use a valueize callback. */
3235 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3237 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3238 && host_integerp (idx, 0))
3240 tree low_bound, unit_size;
3242 /* If the resulting bit-offset is constant, track it. */
3243 if ((low_bound = array_ref_low_bound (t),
3244 host_integerp (low_bound, 0))
3245 && (unit_size = array_ref_element_size (t),
3246 host_integerp (unit_size, 1)))
3248 offset = TREE_INT_CST_LOW (idx);
3249 offset -= TREE_INT_CST_LOW (low_bound);
3250 offset *= TREE_INT_CST_LOW (unit_size);
3251 offset *= BITS_PER_UNIT;
3253 base = TREE_OPERAND (t, 0);
3254 ctor = get_base_constructor (base, &offset, valueize);
3255 /* Empty constructor. Always fold to 0. */
3256 if (ctor == error_mark_node)
3257 return build_zero_cst (TREE_TYPE (t));
3258 /* Out of bound array access. Value is undefined,
3262 /* We can not determine ctor. */
3265 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3266 TREE_INT_CST_LOW (unit_size)
3274 case TARGET_MEM_REF:
3276 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3277 ctor = get_base_constructor (base, &offset, valueize);
3279 /* Empty constructor. Always fold to 0. */
3280 if (ctor == error_mark_node)
3281 return build_zero_cst (TREE_TYPE (t));
3282 /* We do not know precise address. */
3283 if (max_size == -1 || max_size != size)
3285 /* We can not determine ctor. */
3289 /* Out of bound array access. Value is undefined, but don't fold. */
3293 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
/* Presumably the lines below belong to REALPART_EXPR/IMAGPART_EXPR cases
   whose labels were lost in extraction -- TODO confirm.  */
3298 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3299 if (c && TREE_CODE (c) == COMPLEX_CST)
3300 return fold_build1_loc (EXPR_LOCATION (t),
3301 TREE_CODE (t), TREE_TYPE (t), c);
3313 fold_const_aggregate_ref (tree t)
3315 return fold_const_aggregate_ref_1 (t, NULL);