1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
30 #include "basic-block.h"
34 #include "diagnostic.h"
36 #include "tree-dump.h"
37 #include "tree-flow.h"
38 #include "tree-pass.h"
39 #include "tree-ssa-propagate.h"
40 #include "value-prof.h"
41 #include "langhooks.h"
45 /* If SYM is a constant variable with known value, return the value.
46 NULL_TREE is returned otherwise. */
/* NOTE(review): interior lines are elided from this excerpt (the embedded
   original line numbers jump), so the fragment below is not self-contained;
   the return type, opening braces, and several early-outs are not visible.  */
49 get_symbol_constant_value (tree sym)
52 && (TREE_READONLY (sym)
53 || TREE_CODE (sym) == CONST_DECL))
55 tree val = DECL_INITIAL (sym);
59 if (is_gimple_min_invariant (val))
61 if (TREE_CODE (val) == ADDR_EXPR)
63 tree base = get_base_address (TREE_OPERAND (val, 0));
64 if (base && TREE_CODE (base) == VAR_DECL)
/* Propagating an ADDR_EXPR initializer exposes an address-taken variable:
   mark it addressable and record it in the referenced-vars list if the
   current function tracks one.  */
66 TREE_ADDRESSABLE (base) = 1;
67 if (gimple_referenced_vars (cfun))
68 add_referenced_var (base);
74 /* Variables declared 'const' without an initializer
75 have zero as the initializer if they may not be
76 overridden at link or run time. */
78 && !DECL_EXTERNAL (sym)
79 && targetm.binds_local_p (sym)
80 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
81 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
/* Only integral/float scalars get the implicit zero; aggregates do not.  */
82 return fold_convert (TREE_TYPE (sym), integer_zero_node);
89 /* Return true if we may propagate the address expression ADDR into the
90 dereference DEREF and cancel them. */
/* NOTE(review): lines are elided in this excerpt; early `return false`
   statements after the guards below are not visible.  */
93 may_propagate_address_into_dereference (tree addr, tree deref)
95 gcc_assert (INDIRECT_REF_P (deref)
96 && TREE_CODE (addr) == ADDR_EXPR);
98 /* Don't propagate if ADDR's operand has incomplete type. */
99 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
102 /* If the address is invariant then we do not need to preserve restrict
103 qualifications. But we do need to preserve volatile qualifiers until
104 we can annotate the folded dereference itself properly. */
105 if (is_gimple_min_invariant (addr)
106 && (!TREE_THIS_VOLATILE (deref)
107 || TYPE_VOLATILE (TREE_TYPE (addr))))
108 return useless_type_conversion_p (TREE_TYPE (deref),
109 TREE_TYPE (TREE_OPERAND (addr, 0)));
111 /* Else both the address substitution and the folding must result in
112 a valid useless type conversion sequence. */
113 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
115 && useless_type_conversion_p (TREE_TYPE (deref),
116 TREE_TYPE (TREE_OPERAND (addr, 0))));
120 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
121 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
122 is the desired result type.
124 LOC is the location of the original expression. */
/* NOTE(review): this excerpt elides interior lines (braces, NULL returns,
   some declarations); the flow below is a fragment of the full function.  */
127 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset,
129 bool allow_negative_idx)
131 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
132 tree array_type, elt_type, elt_size;
135 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
136 measured in units of the size of elements type) from that ARRAY_REF).
137 We can't do anything if either is variable.
139 The case we handle here is *(&A[N]+O). */
140 if (TREE_CODE (base) == ARRAY_REF)
142 tree low_bound = array_ref_low_bound (base);
144 elt_offset = TREE_OPERAND (base, 1);
145 if (TREE_CODE (low_bound) != INTEGER_CST
146 || TREE_CODE (elt_offset) != INTEGER_CST)
/* Fold the existing index down to a zero-based element offset.  */
149 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
150 base = TREE_OPERAND (base, 0);
153 /* Ignore stupid user tricks of indexing non-array variables. */
154 array_type = TREE_TYPE (base);
155 if (TREE_CODE (array_type) != ARRAY_TYPE)
157 elt_type = TREE_TYPE (array_type);
158 if (!useless_type_conversion_p (orig_type, elt_type))
161 /* Use signed size type for intermediate computation on the index. */
162 idx_type = ssizetype;
164 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
165 element type (so we can use the alignment if it's not constant).
166 Otherwise, compute the offset as an index by using a division. If the
167 division isn't exact, then don't do anything. */
168 elt_size = TYPE_SIZE_UNIT (elt_type);
171 if (integer_zerop (offset))
173 if (TREE_CODE (elt_size) != INTEGER_CST)
174 elt_size = size_int (TYPE_ALIGN (elt_type));
176 idx = build_int_cst (idx_type, 0);
180 unsigned HOST_WIDE_INT lquo, lrem;
181 HOST_WIDE_INT hquo, hrem;
184 /* The final array offset should be signed, so we need
185 to sign-extend the (possibly pointer) offset here
186 and use signed division. */
187 soffset = double_int_sext (tree_to_double_int (offset),
188 TYPE_PRECISION (TREE_TYPE (offset)));
189 if (TREE_CODE (elt_size) != INTEGER_CST
190 || div_and_round_double (TRUNC_DIV_EXPR, 0,
191 soffset.low, soffset.high,
192 TREE_INT_CST_LOW (elt_size),
193 TREE_INT_CST_HIGH (elt_size),
194 &lquo, &hquo, &lrem, &hrem)
198 idx = build_int_cst_wide (idx_type, lquo, hquo);
201 /* Assume the low bound is zero. If there is a domain type, get the
202 low bound, if any, convert the index into that type, and add the
204 min_idx = build_int_cst (idx_type, 0);
205 domain_type = TYPE_DOMAIN (array_type);
208 idx_type = domain_type;
209 if (TYPE_MIN_VALUE (idx_type))
210 min_idx = TYPE_MIN_VALUE (idx_type);
212 min_idx = fold_convert (idx_type, min_idx);
214 if (TREE_CODE (min_idx) != INTEGER_CST)
217 elt_offset = fold_convert (idx_type, elt_offset);
/* Re-bias the index by the domain's low bound and any carried element
   offset, skipping the no-op additions.  */
220 if (!integer_zerop (min_idx))
221 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
222 if (!integer_zerop (elt_offset))
223 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
225 /* Make sure to possibly truncate late after offsetting. */
226 idx = fold_convert (idx_type, idx);
228 /* We don't want to construct access past array bounds. For example
231 should not be simplified into (*c)[14] or tree-vrp will
232 give false warnings. The same is true for
233 struct A { long x; char d[0]; } *a;
235 which should be not folded to &a->d[-8]. */
237 && TYPE_MAX_VALUE (domain_type)
238 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
240 tree up_bound = TYPE_MAX_VALUE (domain_type);
242 if (tree_int_cst_lt (up_bound, idx)
243 /* Accesses after the end of arrays of size 0 (gcc
244 extension) and 1 are likely intentional ("struct
246 && compare_tree_int (up_bound, 1) > 0)
250 && TYPE_MIN_VALUE (domain_type))
252 if (!allow_negative_idx
253 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
254 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
257 else if (!allow_negative_idx
258 && compare_tree_int (idx, 0) < 0)
/* All checks passed: build the ARRAY_REF result with the caller's
   location.  */
262 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
263 SET_EXPR_LOCATION (t, loc);
269 /* Attempt to fold *(S+O) to S.X.
270 BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE
271 is the desired result type.
273 LOC is the location of the original expression. */
/* NOTE(review): interior lines (braces, `continue`/`break`/return paths)
   are elided from this excerpt.  */
276 maybe_fold_offset_to_component_ref (location_t loc, tree record_type,
277 tree base, tree offset, tree orig_type)
279 tree f, t, field_type, tail_array_field, field_offset;
283 if (TREE_CODE (record_type) != RECORD_TYPE
284 && TREE_CODE (record_type) != UNION_TYPE
285 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
288 /* Short-circuit silly cases. */
289 if (useless_type_conversion_p (record_type, orig_type))
/* Walk every field of the record looking for one that covers OFFSET.  */
292 tail_array_field = NULL_TREE;
293 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
297 if (TREE_CODE (f) != FIELD_DECL)
299 if (DECL_BIT_FIELD (f))
302 if (!DECL_FIELD_OFFSET (f))
304 field_offset = byte_position (f);
305 if (TREE_CODE (field_offset) != INTEGER_CST)
308 /* ??? Java creates "interesting" fields for representing base classes.
309 They have no name, and have no context. With no context, we get into
310 trouble with nonoverlapping_component_refs_p. Skip them. */
311 if (!DECL_FIELD_CONTEXT (f))
314 /* The previous array field isn't at the end. */
315 tail_array_field = NULL_TREE;
317 /* Check to see if this offset overlaps with the field. */
318 cmp = tree_int_cst_compare (field_offset, offset);
322 field_type = TREE_TYPE (f);
324 /* Here we exactly match the offset being checked. If the types match,
325 then we can return that field. */
327 && useless_type_conversion_p (orig_type, field_type))
329 t = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
333 /* Don't care about offsets into the middle of scalars. */
334 if (!AGGREGATE_TYPE_P (field_type))
337 /* Check for array at the end of the struct. This is often
338 used as for flexible array members. We should be able to
339 turn this into an array access anyway. */
340 if (TREE_CODE (field_type) == ARRAY_TYPE)
341 tail_array_field = f;
343 /* Check the end of the field against the offset. */
344 if (!DECL_SIZE_UNIT (f)
345 || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
347 t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
348 if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
351 /* If we matched, then set offset to the displacement into
353 new_base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
354 SET_EXPR_LOCATION (new_base, loc);
356 /* Recurse to possibly find the match. */
357 ret = maybe_fold_offset_to_array_ref (loc, new_base, t, orig_type,
358 f == TYPE_FIELDS (record_type));
361 ret = maybe_fold_offset_to_component_ref (loc, field_type, new_base, t,
/* No exact field match: fall back to a trailing array field, if any.  */
367 if (!tail_array_field)
370 f = tail_array_field;
371 field_type = TREE_TYPE (f);
372 offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
374 /* If we get here, we've got an aggregate field, and a possibly
375 nonzero offset into them. Recurse and hope for a valid match. */
376 base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
377 SET_EXPR_LOCATION (base, loc);
379 t = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type,
380 f == TYPE_FIELDS (record_type));
383 return maybe_fold_offset_to_component_ref (loc, field_type, base, offset,
387 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
388 or BASE[index] or by combination of those.
390 LOC is the location of original expression.
392 Before attempting the conversion strip off existing ADDR_EXPRs and
393 handled component refs. */
/* NOTE(review): interior lines elided; declarations and some returns are
   not visible in this fragment.  */
396 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
403 if (TREE_CODE (base) != ADDR_EXPR)
406 base = TREE_OPERAND (base, 0);
408 /* Handle case where existing COMPONENT_REF pick e.g. wrong field of union,
409 so it needs to be removed and new COMPONENT_REF constructed.
410 The wrong COMPONENT_REF are often constructed by folding the
411 (type *)&object within the expression (type *)&object+offset */
412 if (handled_component_p (base))
414 HOST_WIDE_INT sub_offset, size, maxsize;
416 newbase = get_ref_base_and_extent (base, &sub_offset,
418 gcc_assert (newbase);
/* Only fold when the sub-offset is a whole number of bytes.  */
421 && !(sub_offset & (BITS_PER_UNIT - 1)))
425 offset = int_const_binop (PLUS_EXPR, offset,
426 build_int_cst (TREE_TYPE (offset),
427 sub_offset / BITS_PER_UNIT), 1);
430 if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
431 && integer_zerop (offset))
433 type = TREE_TYPE (base);
/* Try the component-ref folding first, then the array-ref folding.  */
435 ret = maybe_fold_offset_to_component_ref (loc, type, base, offset, orig_type);
437 ret = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type, true);
442 /* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
443 or &BASE[index] or by combination of those.
445 LOC is the location of the original expression.
447 Before attempting the conversion strip off existing component refs. */
/* NOTE(review): interior lines elided (declarations, NULL-check after the
   maybe_fold_offset_to_reference call, some returns).  */
450 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
455 gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
456 && POINTER_TYPE_P (orig_type));
458 t = maybe_fold_offset_to_reference (loc, addr, offset,
459 TREE_TYPE (orig_type));
465 /* For __builtin_object_size to function correctly we need to
466 make sure not to fold address arithmetic so that we change
467 reference from one array to another. This would happen for
470 struct X { char s1[10]; char s2[10] } s;
471 char *foo (void) { return &s.s2[-4]; }
473 where we need to avoid generating &s.s1[6]. As the C and
474 C++ frontends create different initial trees
475 (char *) &s.s1 + -4 vs. &s.s1[-4] we have to do some
476 sophisticated comparisons here. Note that checking for the
477 condition after the fact is easier than trying to avoid doing
480 if (TREE_CODE (orig) == ADDR_EXPR)
481 orig = TREE_OPERAND (orig, 0);
/* Reject the fold when it would move the reference from one array object
   to a different one (see __builtin_object_size note above).  */
482 if ((TREE_CODE (orig) == ARRAY_REF
483 || (TREE_CODE (orig) == COMPONENT_REF
484 && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
485 && (TREE_CODE (t) == ARRAY_REF
486 || TREE_CODE (t) == COMPONENT_REF)
487 && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
488 ? TREE_OPERAND (orig, 0) : orig,
489 TREE_CODE (t) == ARRAY_REF
490 ? TREE_OPERAND (t, 0) : t, 0))
493 ptr_type = build_pointer_type (TREE_TYPE (t));
494 if (!useless_type_conversion_p (orig_type, ptr_type))
496 return build_fold_addr_expr_with_type_loc (loc, t, ptr_type);
502 /* A subroutine of fold_stmt. Attempt to simplify *(BASE+OFFSET).
503 Return the simplified expression, or NULL if nothing could be done. */
/* NOTE(review): interior lines elided; several early returns and braces
   are not visible in this fragment.  */
506 maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
509 bool volatile_p = TREE_THIS_VOLATILE (expr);
510 location_t loc = EXPR_LOCATION (expr);
512 /* We may well have constructed a double-nested PLUS_EXPR via multiple
513 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
514 are sometimes added. */
516 STRIP_TYPE_NOPS (base);
517 TREE_OPERAND (expr, 0) = base;
519 /* One possibility is that the address reduces to a string constant. */
520 t = fold_read_from_constant_string (expr);
524 /* Add in any offset from a POINTER_PLUS_EXPR. */
525 if (TREE_CODE (base) == POINTER_PLUS_EXPR)
529 offset2 = TREE_OPERAND (base, 1);
530 if (TREE_CODE (offset2) != INTEGER_CST)
532 base = TREE_OPERAND (base, 0);
534 offset = fold_convert (sizetype,
535 int_const_binop (PLUS_EXPR, offset, offset2, 1));
538 if (TREE_CODE (base) == ADDR_EXPR)
540 tree base_addr = base;
542 /* Strip the ADDR_EXPR. */
543 base = TREE_OPERAND (base, 0);
545 /* Fold away CONST_DECL to its value, if the type is scalar. */
546 if (TREE_CODE (base) == CONST_DECL
547 && is_gimple_min_invariant (DECL_INITIAL (base)))
548 return DECL_INITIAL (base);
550 /* If there is no offset involved simply return the folded base. */
551 if (integer_zerop (offset))
554 /* Try folding *(&B+O) to B.X. */
555 t = maybe_fold_offset_to_reference (loc, base_addr, offset,
559 /* Preserve volatileness of the original expression.
560 We can end up with a plain decl here which is shared
561 and we shouldn't mess with its flags. */
563 TREE_THIS_VOLATILE (t) = volatile_p;
569 /* We can get here for out-of-range string constant accesses,
570 such as "_"[3]. Bail out of the entire substitution search
571 and arrange for the entire statement to be replaced by a
572 call to __builtin_trap. In all likelihood this will all be
573 constant-folded away, but in the meantime we can't leave with
574 something that get_expr_operands can't understand. */
578 if (TREE_CODE (t) == ADDR_EXPR
579 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
581 /* FIXME: Except that this causes problems elsewhere with dead
582 code not being deleted, and we die in the rtl expanders
583 because we failed to remove some ssa_name. In the meantime,
585 /* FIXME2: This condition should be signaled by
586 fold_read_from_constant_string directly, rather than
587 re-checking for it here. */
588 return integer_zero_node;
591 /* Try folding *(B+O) to B->X. Still an improvement. */
592 if (POINTER_TYPE_P (TREE_TYPE (base)))
594 t = maybe_fold_offset_to_reference (loc, base, offset,
601 /* Otherwise we had an offset that we could not simplify. */
606 /* A quaint feature extant in our address arithmetic is that there
607 can be hidden type changes here. The type of the result need
608 not be the same as the type of the input pointer.
610 What we're after here is an expression of the form
611 (T *)(&array + const)
612 where array is OP0, const is OP1, RES_TYPE is T and
613 the cast doesn't actually exist, but is implicit in the
614 type of the POINTER_PLUS_EXPR. We'd like to turn this into
616 which may be able to propagate further. */
/* NOTE(review): interior lines elided; return statements, braces and
   some declarations are missing from this view.  */
619 maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
624 /* The first operand should be an ADDR_EXPR. */
625 if (TREE_CODE (op0) != ADDR_EXPR)
627 op0 = TREE_OPERAND (op0, 0);
629 /* It had better be a constant. */
630 if (TREE_CODE (op1) != INTEGER_CST)
632 /* Or op0 should now be A[0] and the non-constant offset defined
633 via a multiplication by the array element size. */
634 if (TREE_CODE (op0) == ARRAY_REF
635 && integer_zerop (TREE_OPERAND (op0, 1))
636 && TREE_CODE (op1) == SSA_NAME
637 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1))
639 gimple offset_def = SSA_NAME_DEF_STMT (op1);
640 if (!is_gimple_assign (offset_def))
643 /* As we will end up creating a variable index array access
644 in the outermost array dimension make sure there isn't
645 a more inner array that the index could overflow to. */
646 if (TREE_CODE (TREE_OPERAND (op0, 0)) == ARRAY_REF)
649 /* Do not build array references of something that we can't
650 see the true number of array dimensions for. */
651 if (!DECL_P (TREE_OPERAND (op0, 0))
652 && !handled_component_p (TREE_OPERAND (op0, 0)))
/* &A[0] p+ (i * sizeof (elt))  -->  &A[i].  */
655 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
656 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
657 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def),
658 TYPE_SIZE_UNIT (TREE_TYPE (op0))))
659 return build_fold_addr_expr
660 (build4 (ARRAY_REF, TREE_TYPE (op0),
661 TREE_OPERAND (op0, 0),
662 gimple_assign_rhs1 (offset_def),
663 TREE_OPERAND (op0, 2),
664 TREE_OPERAND (op0, 3)));
/* For one-byte elements the offset itself is the index.  */
665 else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0)))
666 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
667 return build_fold_addr_expr
668 (build4 (ARRAY_REF, TREE_TYPE (op0),
669 TREE_OPERAND (op0, 0),
671 TREE_OPERAND (op0, 2),
672 TREE_OPERAND (op0, 3)));
677 /* If the first operand is an ARRAY_REF, expand it so that we can fold
678 the offset into it. */
679 while (TREE_CODE (op0) == ARRAY_REF)
681 tree array_obj = TREE_OPERAND (op0, 0);
682 tree array_idx = TREE_OPERAND (op0, 1);
683 tree elt_type = TREE_TYPE (op0);
684 tree elt_size = TYPE_SIZE_UNIT (elt_type);
687 if (TREE_CODE (array_idx) != INTEGER_CST)
689 if (TREE_CODE (elt_size) != INTEGER_CST)
692 /* Un-bias the index by the min index of the array type. */
693 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
696 min_idx = TYPE_MIN_VALUE (min_idx);
699 if (TREE_CODE (min_idx) != INTEGER_CST)
702 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
703 if (!integer_zerop (min_idx))
704 array_idx = int_const_binop (MINUS_EXPR, array_idx,
709 /* Convert the index to a byte offset. */
710 array_idx = fold_convert (sizetype, array_idx);
711 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
713 /* Update the operands for the next round, or for folding. */
714 op1 = int_const_binop (PLUS_EXPR,
719 ptd_type = TREE_TYPE (res_type);
720 /* If we want a pointer to void, reconstruct the reference from the
721 array element type. A pointer to that can be trivially converted
722 to void *. This happens as we fold (void *)(ptr p+ off). */
723 if (VOID_TYPE_P (ptd_type)
724 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
725 ptd_type = TREE_TYPE (TREE_TYPE (op0));
727 /* At which point we can try some of the same things as for indirects. */
728 t = maybe_fold_offset_to_array_ref (loc, op0, op1, ptd_type, true);
730 t = maybe_fold_offset_to_component_ref (loc, TREE_TYPE (op0), op0, op1,
/* Wrap the folded reference back into an address of the requested type.  */
734 t = build1 (ADDR_EXPR, res_type, t);
735 SET_EXPR_LOCATION (t, loc);
741 /* Subroutine of fold_stmt. We perform several simplifications of the
742 memory reference tree EXPR and make sure to re-gimplify them properly
743 after propagation of constant addresses. IS_LHS is true if the
744 reference is supposed to be an lvalue. */
/* NOTE(review): interior lines elided; the declaration of `t` and several
   returns are not visible here.  */
747 maybe_fold_reference (tree expr, bool is_lhs)
751 if (TREE_CODE (expr) == ARRAY_REF
754 tree tem = fold_read_from_constant_string (expr);
759 /* ??? We might want to open-code the relevant remaining cases
760 to avoid using the generic fold. */
761 if (handled_component_p (*t)
762 && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
764 tree tem = fold (*t);
/* Descend to the base object of the reference chain.  */
769 while (handled_component_p (*t))
770 t = &TREE_OPERAND (*t, 0);
772 if (TREE_CODE (*t) == INDIRECT_REF)
774 tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0),
776 /* Avoid folding *"abc" = 5 into 'a' = 5. */
777 if (is_lhs && tem && CONSTANT_CLASS_P (tem))
780 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR)
781 /* If we had a good reason for propagating the address here,
782 make sure we end up with valid gimple. See PR34989. */
783 tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
/* Re-fold the whole reference after the base was simplified.  */
788 tem = maybe_fold_reference (expr, is_lhs);
797 tree tem = get_symbol_constant_value (*t);
799 && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
801 *t = unshare_expr (tem);
802 tem = maybe_fold_reference (expr, is_lhs);
813 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
814 replacement rhs for the statement or NULL_TREE if no simplification
815 could be made. It is assumed that the operands have been previously
/* NOTE(review): interior lines elided throughout (braces, returns,
   declarations); the switch below is a fragment.  */
819 fold_gimple_assign (gimple_stmt_iterator *si)
821 gimple stmt = gsi_stmt (*si);
822 enum tree_code subcode = gimple_assign_rhs_code (stmt);
823 location_t loc = gimple_location (stmt);
825 tree result = NULL_TREE;
827 switch (get_gimple_rhs_class (subcode))
829 case GIMPLE_SINGLE_RHS:
831 tree rhs = gimple_assign_rhs1 (stmt);
833 /* Try to fold a conditional expression. */
834 if (TREE_CODE (rhs) == COND_EXPR)
836 tree op0 = COND_EXPR_COND (rhs);
839 location_t cond_loc = EXPR_LOCATION (rhs);
841 if (COMPARISON_CLASS_P (op0))
843 fold_defer_overflow_warnings ();
844 tem = fold_binary_loc (cond_loc,
845 TREE_CODE (op0), TREE_TYPE (op0),
846 TREE_OPERAND (op0, 0),
847 TREE_OPERAND (op0, 1));
848 /* This is actually a conditional expression, not a GIMPLE
849 conditional statement, however, the valid_gimple_rhs_p
850 test still applies. */
851 set = (tem && is_gimple_condexpr (tem)
852 && valid_gimple_rhs_p (tem));
853 fold_undefer_overflow_warnings (set, stmt, 0);
855 else if (is_gimple_min_invariant (op0))
864 result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
865 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
868 else if (TREE_CODE (rhs) == TARGET_MEM_REF)
869 return maybe_fold_tmr (rhs);
871 else if (REFERENCE_CLASS_P (rhs))
872 return maybe_fold_reference (rhs, false);
874 else if (TREE_CODE (rhs) == ADDR_EXPR)
876 tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true);
878 result = fold_convert (TREE_TYPE (rhs),
879 build_fold_addr_expr_loc (loc, tem));
882 else if (TREE_CODE (rhs) == CONSTRUCTOR
883 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
884 && (CONSTRUCTOR_NELTS (rhs)
885 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
887 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
891 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
892 if (TREE_CODE (val) != INTEGER_CST
893 && TREE_CODE (val) != REAL_CST
894 && TREE_CODE (val) != FIXED_CST)
897 return build_vector_from_ctor (TREE_TYPE (rhs),
898 CONSTRUCTOR_ELTS (rhs));
901 else if (DECL_P (rhs))
902 return unshare_expr (get_symbol_constant_value (rhs));
904 /* If we couldn't fold the RHS, hand over to the generic
906 if (result == NULL_TREE)
909 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
910 that may have been added by fold, and "useless" type
911 conversions that might now be apparent due to propagation. */
912 STRIP_USELESS_TYPE_CONVERSION (result);
914 if (result != rhs && valid_gimple_rhs_p (result))
921 case GIMPLE_UNARY_RHS:
923 tree rhs = gimple_assign_rhs1 (stmt);
925 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
928 /* If the operation was a conversion do _not_ mark a
929 resulting constant with TREE_OVERFLOW if the original
930 constant was not. These conversions have implementation
931 defined behavior and retaining the TREE_OVERFLOW flag
932 here would confuse later passes such as VRP. */
933 if (CONVERT_EXPR_CODE_P (subcode)
934 && TREE_CODE (result) == INTEGER_CST
935 && TREE_CODE (rhs) == INTEGER_CST)
936 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
938 STRIP_USELESS_TYPE_CONVERSION (result);
939 if (valid_gimple_rhs_p (result))
/* Pointer-to-pointer conversions: try folding the implied
   zero-offset address arithmetic.  */
942 else if (CONVERT_EXPR_CODE_P (subcode)
943 && POINTER_TYPE_P (gimple_expr_type (stmt))
944 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
946 tree type = gimple_expr_type (stmt);
947 tree t = maybe_fold_offset_to_address (loc,
948 gimple_assign_rhs1 (stmt),
949 integer_zero_node, type);
956 case GIMPLE_BINARY_RHS:
957 /* Try to fold pointer addition. */
958 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
960 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
961 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
963 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
964 if (!useless_type_conversion_p
965 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
966 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
968 result = maybe_fold_stmt_addition (gimple_location (stmt),
970 gimple_assign_rhs1 (stmt),
971 gimple_assign_rhs2 (stmt));
975 result = fold_binary_loc (loc, subcode,
976 TREE_TYPE (gimple_assign_lhs (stmt)),
977 gimple_assign_rhs1 (stmt),
978 gimple_assign_rhs2 (stmt));
982 STRIP_USELESS_TYPE_CONVERSION (result);
983 if (valid_gimple_rhs_p (result))
986 /* Fold might have produced non-GIMPLE, so if we trust it blindly
987 we lose canonicalization opportunities. Do not go again
988 through fold here though, or the same non-GIMPLE will be
990 if (commutative_tree_code (subcode)
991 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
992 gimple_assign_rhs2 (stmt), false))
993 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
994 gimple_assign_rhs2 (stmt),
995 gimple_assign_rhs1 (stmt));
999 case GIMPLE_INVALID_RHS:
1006 /* Attempt to fold a conditional statement. Return true if any changes were
1007 made. We only attempt to fold the condition expression, and do not perform
1008 any transformation that would require alteration of the cfg. It is
1009 assumed that the operands have been previously folded. */
/* NOTE(review): interior lines elided (return type, braces, `return`
   statements).  */
1012 fold_gimple_cond (gimple stmt)
1014 tree result = fold_binary_loc (gimple_location (stmt),
1015 gimple_cond_code (stmt),
1017 gimple_cond_lhs (stmt),
1018 gimple_cond_rhs (stmt));
1022 STRIP_USELESS_TYPE_CONVERSION (result);
/* Only install the folded form when it is still a valid GIMPLE condition.  */
1023 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
1025 gimple_cond_set_condition_from_tree (stmt, result);
1033 /* Convert EXPR into a GIMPLE value suitable for substitution on the
1034 RHS of an assignment. Insert the necessary statements before
1035 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
1036 is replaced. If the call is expected to produces a result, then it
1037 is replaced by an assignment of the new RHS to the result variable.
1038 If the result is to be ignored, then the call is replaced by a
/* NOTE(review): interior lines elided (declarations such as `lhs` and
   `last`, loop/branch braces, and some statements).  */
1042 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
1045 tree tmp = NULL_TREE; /* Silence warning. */
1046 gimple stmt, new_stmt;
1047 gimple_stmt_iterator i;
1048 gimple_seq stmts = gimple_seq_alloc();
1049 struct gimplify_ctx gctx;
1052 stmt = gsi_stmt (*si_p);
1054 gcc_assert (is_gimple_call (stmt));
1056 lhs = gimple_call_lhs (stmt);
1058 push_gimplify_context (&gctx);
/* With no LHS just gimplify EXPR for its side effects; otherwise
   gimplify it into a temporary we can assign to the LHS below.  */
1060 if (lhs == NULL_TREE)
1061 gimplify_and_add (expr, &stmts);
1063 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
1065 pop_gimplify_context (NULL);
1067 if (gimple_has_location (stmt))
1068 annotate_all_with_location (stmts, gimple_location (stmt));
1070 /* The replacement can expose previously unreferenced variables. */
1071 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
1075 gsi_insert_before (si_p, last, GSI_NEW_STMT);
1078 new_stmt = gsi_stmt (i);
1079 find_new_referenced_vars (new_stmt);
1080 mark_symbols_for_renaming (new_stmt);
1084 if (lhs == NULL_TREE)
1086 unlink_stmt_vdef (stmt);
1087 release_defs (stmt);
1094 gsi_insert_before (si_p, last, GSI_NEW_STMT);
/* Replace the call with LHS = TMP, carrying over the virtual operands.  */
1097 new_stmt = gimple_build_assign (lhs, tmp);
1098 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1099 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1100 move_ssa_defining_stmt_for_defs (new_stmt, stmt);
1103 gimple_set_location (new_stmt, gimple_location (stmt));
1104 gsi_replace (si_p, new_stmt, false);
1107 /* Return the string length, maximum string length or maximum value of
1109 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1110 is not NULL and, for TYPE == 0, its value is not equal to the length
1111 we determine or if we are unable to determine the length or value,
1112 return false. VISITED is a bitmap of visited variables.
1113 TYPE is 0 if string length should be returned, 1 for maximum string
1114 length and 2 for maximum value ARG can have. */
/* NOTE(review): interior lines elided (return type, braces, `return`
   statements, declaration of `var`/`def_stmt`); fragment only.  */
1117 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
1122 if (TREE_CODE (arg) != SSA_NAME)
1124 if (TREE_CODE (arg) == COND_EXPR)
1125 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
1126 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
1127 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1128 else if (TREE_CODE (arg) == ADDR_EXPR
1129 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1130 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1132 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1133 if (TREE_CODE (aop0) == INDIRECT_REF
1134 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1135 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1136 length, visited, type);
1142 if (TREE_CODE (val) != INTEGER_CST
1143 || tree_int_cst_sgn (val) < 0)
1147 val = c_strlen (arg, 1);
/* Merge VAL into *LENGTH: for the "maximum" queries keep the larger
   constant, for exact length require equality.  */
1155 if (TREE_CODE (*length) != INTEGER_CST
1156 || TREE_CODE (val) != INTEGER_CST)
1159 if (tree_int_cst_lt (*length, val))
1163 else if (simple_cst_equal (val, *length) != 1)
1171 /* If we were already here, break the infinite cycle. */
1172 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
1174 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
1177 def_stmt = SSA_NAME_DEF_STMT (var);
1179 switch (gimple_code (def_stmt))
1182 /* The RHS of the statement defining VAR must either have a
1183 constant length or come from another SSA_NAME with a constant
1185 if (gimple_assign_single_p (def_stmt)
1186 || gimple_assign_unary_nop_p (def_stmt))
1188 tree rhs = gimple_assign_rhs1 (def_stmt);
1189 return get_maxval_strlen (rhs, length, visited, type);
1195 /* All the arguments of the PHI node must have the same constant
1199 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1201 tree arg = gimple_phi_arg (def_stmt, i)->def;
1203 /* If this PHI has itself as an argument, we cannot
1204 determine the string length of this argument. However,
1205 if we can find a constant string length for the other
1206 PHI args then we can still be sure that this is a
1207 constant string length. So be optimistic and just
1208 continue with the next argument. */
1209 if (arg == gimple_phi_result (def_stmt))
1212 if (!get_maxval_strlen (arg, length, visited, type))
1224 /* Fold builtin call in statement STMT.  Returns a simplified tree.
1225 We may return a non-constant expression, including another call
1226 to a different function and with different arguments, e.g.,
1227 substituting memcpy for strcpy when the string length is known.
1228 Note that some builtins expand into inline code that may not
1229 be valid in GIMPLE. Callers must take care. */
1232 gimple_fold_builtin (gimple stmt)
1234 tree result, val[3];
/* NOTE(review): this extract is missing several lines here; the
   declarations of CALLEE, A, IGNORE, NARGS, ARG_IDX, TYPE and VISITED
   used below are presumably among them -- confirm against the full
   source file.  */
1240 location_t loc = gimple_location (stmt);
1242 gcc_assert (is_gimple_call (stmt));
/* The call's value is ignored when there is no LHS to receive it.  */
1244 ignore = (gimple_call_lhs (stmt) == NULL);
1246 /* First try the generic builtin folder. If that succeeds, return the
1248 result = fold_call_stmt (stmt, ignore);
1252 STRIP_NOPS (result);
1256 /* Ignore MD builtins. */
1257 callee = gimple_call_fndecl (stmt);
1258 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1261 /* If the builtin could not be folded, and it has no argument list,
1263 nargs = gimple_call_num_args (stmt);
1267 /* Limit the work only for builtins we know how to simplify. */
1268 switch (DECL_FUNCTION_CODE (callee))
1270 case BUILT_IN_STRLEN:
1271 case BUILT_IN_FPUTS:
1272 case BUILT_IN_FPUTS_UNLOCKED:
1276 case BUILT_IN_STRCPY:
1277 case BUILT_IN_STRNCPY:
1281 case BUILT_IN_MEMCPY_CHK:
1282 case BUILT_IN_MEMPCPY_CHK:
1283 case BUILT_IN_MEMMOVE_CHK:
1284 case BUILT_IN_MEMSET_CHK:
1285 case BUILT_IN_STRNCPY_CHK:
1289 case BUILT_IN_STRCPY_CHK:
1290 case BUILT_IN_STPCPY_CHK:
1294 case BUILT_IN_SNPRINTF_CHK:
1295 case BUILT_IN_VSNPRINTF_CHK:
/* NOTE(review): the assignments of ARG_IDX and TYPE per builtin appear
   on lines missing from this extract.  */
1303 if (arg_idx >= nargs)
1306 /* Try to use the dataflow information gathered by the CCP process. */
1307 visited = BITMAP_ALLOC (NULL);
1308 bitmap_clear (visited);
/* Compute the maximal known string length of the interesting argument;
   leave the corresponding VAL slot as NULL_TREE when it is unknown.  */
1310 memset (val, 0, sizeof (val));
1311 a = gimple_call_arg (stmt, arg_idx);
1312 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
1313 val[arg_idx] = NULL_TREE;
1315 BITMAP_FREE (visited);
/* Dispatch on the builtin a second time, now with any known string
   length available in VAL[].  */
1318 switch (DECL_FUNCTION_CODE (callee))
1320 case BUILT_IN_STRLEN:
1321 if (val[0] && nargs == 1)
1324 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
1326 /* If the result is not a valid gimple value, or not a cast
1327 of a valid gimple value, then we can not use the result. */
1328 if (is_gimple_val (new_val)
1329 || (is_gimple_cast (new_val)
1330 && is_gimple_val (TREE_OPERAND (new_val, 0))))
1335 case BUILT_IN_STRCPY:
1336 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
1337 result = fold_builtin_strcpy (loc, callee,
1338 gimple_call_arg (stmt, 0),
1339 gimple_call_arg (stmt, 1),
1343 case BUILT_IN_STRNCPY:
1344 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1345 result = fold_builtin_strncpy (loc, callee,
1346 gimple_call_arg (stmt, 0),
1347 gimple_call_arg (stmt, 1),
1348 gimple_call_arg (stmt, 2),
1352 case BUILT_IN_FPUTS:
1354 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1355 gimple_call_arg (stmt, 1),
1356 ignore, false, val[0]);
1359 case BUILT_IN_FPUTS_UNLOCKED:
1361 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1362 gimple_call_arg (stmt, 1),
1363 ignore, true, val[0]);
1366 case BUILT_IN_MEMCPY_CHK:
1367 case BUILT_IN_MEMPCPY_CHK:
1368 case BUILT_IN_MEMMOVE_CHK:
1369 case BUILT_IN_MEMSET_CHK:
1370 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1371 result = fold_builtin_memory_chk (loc, callee,
1372 gimple_call_arg (stmt, 0),
1373 gimple_call_arg (stmt, 1),
1374 gimple_call_arg (stmt, 2),
1375 gimple_call_arg (stmt, 3),
1377 DECL_FUNCTION_CODE (callee));
1380 case BUILT_IN_STRCPY_CHK:
1381 case BUILT_IN_STPCPY_CHK:
1382 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1383 result = fold_builtin_stxcpy_chk (loc, callee,
1384 gimple_call_arg (stmt, 0),
1385 gimple_call_arg (stmt, 1),
1386 gimple_call_arg (stmt, 2),
1388 DECL_FUNCTION_CODE (callee));
1391 case BUILT_IN_STRNCPY_CHK:
1392 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1393 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
1394 gimple_call_arg (stmt, 1),
1395 gimple_call_arg (stmt, 2),
1396 gimple_call_arg (stmt, 3),
1400 case BUILT_IN_SNPRINTF_CHK:
1401 case BUILT_IN_VSNPRINTF_CHK:
1402 if (val[1] && is_gimple_val (val[1]))
1403 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1404 DECL_FUNCTION_CODE (callee));
/* When the call's value is unused, strip the folded expression down to
   just its side effects.  */
1411 if (result && ignore)
1412 result = fold_ignored_result (result);
1416 /* Search for a base binfo of BINFO that corresponds to TYPE and return it if
1417 it is found or NULL_TREE if it is not. */
1420 get_base_binfo_for_type (tree binfo, tree type)
1424 tree res = NULL_TREE;
/* Walk the direct base binfos of BINFO looking for one whose type
   matches TYPE.  NOTE(review): the body that records the match and
   returns RES is on lines missing from this extract.  */
1426 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1427 if (TREE_TYPE (base_binfo) == type)
1436 /* Return a binfo describing the part of object referenced by expression REF.
1437 Return NULL_TREE if it cannot be determined. REF can consist of a series of
1438 COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just
1439 a simple declaration, indirect reference or an SSA_NAME. If the function
1440 discovers an INDIRECT_REF or an SSA_NAME, it will assume that the
1441 encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
1442 Otherwise the first non-artificial field declaration or the base declaration
1443 will be examined to get the encapsulating type. */
1446 gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
/* Peel COMPONENT_REFs off REF, tracking the binfo of the referenced
   sub-object as we descend.  */
1450 if (TREE_CODE (ref) == COMPONENT_REF)
1453 tree binfo, base_binfo;
1454 tree field = TREE_OPERAND (ref, 1);
/* A non-artificial field directly names the accessed member; if it is
   of record type its own binfo is the answer.  */
1456 if (!DECL_ARTIFICIAL (field))
1458 tree type = TREE_TYPE (field);
1459 if (TREE_CODE (type) == RECORD_TYPE)
1460 return TYPE_BINFO (type);
/* Otherwise FIELD is an artificial (compiler-generated) base field;
   check whether it corresponds to the first base of the parent type.  */
1465 par_type = TREE_TYPE (TREE_OPERAND (ref, 0));
1466 binfo = TYPE_BINFO (par_type);
1468 || BINFO_N_BASE_BINFOS (binfo) == 0)
1471 base_binfo = BINFO_BASE_BINFO (binfo, 0);
1472 if (BINFO_TYPE (base_binfo) != TREE_TYPE (field))
/* Recurse on the containing object to find its binfo, then descend
   into the base corresponding to FIELD's type.  */
1476 d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0),
1478 /* Get descendant binfo. */
1481 return get_base_binfo_for_type (d_binfo, TREE_TYPE (field));
1484 ref = TREE_OPERAND (ref, 0);
1486 else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE)
1487 return TYPE_BINFO (TREE_TYPE (ref));
/* For an SSA_NAME or indirect reference we cannot see the object's
   dynamic type here; trust KNOWN_BINFO supplied by the caller.  */
1488 else if (known_binfo
1489 && (TREE_CODE (ref) == SSA_NAME
1490 || TREE_CODE (ref) == INDIRECT_REF))
1497 /* Fold a OBJ_TYPE_REF expression to the address of a function. TOKEN is
1498 integer form of OBJ_TYPE_REF_TOKEN of the reference expression. KNOWN_BINFO
1499 carries the binfo describing the true type of OBJ_TYPE_REF_OBJECT(REF). */
1502 gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo)
/* Walk the virtual function list of KNOWN_BINFO to the entry selected
   by TOKEN.  NOTE(review): the loop header advancing V is on lines
   missing from this extract.  */
1507 v = BINFO_VIRTUALS (known_binfo);
/* Each vtable slot occupies TARGET_VTABLE_USES_DESCRIPTORS entries on
   targets that use function descriptors, one entry otherwise.  */
1511 i += (TARGET_VTABLE_USES_DESCRIPTORS
1512 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
/* Return the address of the function found in the selected slot.  */
1516 fndecl = TREE_VALUE (v);
1517 return build_fold_addr_expr (fndecl);
1521 /* Fold a OBJ_TYPE_REF expression to the address of a function. If KNOWN_TYPE
1522 is not NULL_TREE, it is the true type of the outmost encapsulating object if
1523 that comes from a pointer SSA_NAME. If the true outmost encapsulating type
1524 can be determined from a declaration OBJ_TYPE_REF_OBJECT(REF), it is used
1525 regardless of KNOWN_TYPE (which thus can be NULL_TREE). */
1528 gimple_fold_obj_type_ref (tree ref, tree known_type)
1530 tree obj = OBJ_TYPE_REF_OBJECT (ref);
1531 tree known_binfo = known_type ? TYPE_BINFO (known_type) : NULL_TREE;
/* Strip a wrapping ADDR_EXPR so we inspect the underlying object.  */
1534 if (TREE_CODE (obj) == ADDR_EXPR)
1535 obj = TREE_OPERAND (obj, 0);
/* Determine the binfo of the referenced sub-object, then resolve the
   virtual call through the vtable token.  */
1537 binfo = gimple_get_relevant_ref_binfo (obj, known_binfo);
1540 HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
1541 return gimple_fold_obj_type_ref_known_binfo (token, binfo);
1547 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1548 The statement may be replaced by another statement, e.g., if the call
1549 simplifies to a constant value. Return true if any changes were made.
1550 It is assumed that the operands have been previously folded. */
1553 fold_gimple_call (gimple_stmt_iterator *gsi)
1555 gimple stmt = gsi_stmt (*gsi);
1557 tree callee = gimple_call_fndecl (stmt);
1559 /* Check for builtins that CCP can handle using information not
1560 available in the generic fold routines. */
1561 if (callee && DECL_BUILT_IN (callee))
1563 tree result = gimple_fold_builtin (stmt);
/* Replace the call with the folded result; fall back to gimplifying it
   when the result is not directly usable as a replacement.  */
1567 if (!update_call_from_tree (gsi, result))
1568 gimplify_and_update_call_from_tree (gsi, result);
1574 /* ??? Should perhaps do this in fold proper. However, doing it
1575 there requires that we create a new CALL_EXPR, and that requires
1576 copying EH region info to the new node. Easier to just do it
1577 here where we can just smash the call operand. */
1578 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
/* Try to devirtualize OBJ_TYPE_REF calls whose object is a known
   address expression.  */
1579 callee = gimple_call_fn (stmt);
1580 if (TREE_CODE (callee) == OBJ_TYPE_REF
1581 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR)
1585 t = gimple_fold_obj_type_ref (callee, NULL_TREE);
1588 gimple_call_set_fn (stmt, t);
1597 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1598 distinguishes both cases. */
1601 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1603 bool changed = false;
1604 gimple stmt = gsi_stmt (*gsi);
1607 /* Fold the main computation performed by the statement. */
1608 switch (gimple_code (stmt))
/* GIMPLE_ASSIGN: fold the RHS; convert the result to the LHS type when
   the types differ, and install it only when that is safe (in
   particular, an inplace fold must not grow the operand count).  */
1612 unsigned old_num_ops = gimple_num_ops (stmt);
1613 tree new_rhs = fold_gimple_assign (gsi);
1614 tree lhs = gimple_assign_lhs (stmt);
1616 && !useless_type_conversion_p (TREE_TYPE (lhs),
1617 TREE_TYPE (new_rhs)))
1618 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1621 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1623 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
/* GIMPLE_COND: fold the comparison controlling the branch.  */
1630 changed |= fold_gimple_cond (stmt);
1634 /* Fold *& in call arguments. */
1635 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1636 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1638 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1641 gimple_call_set_arg (stmt, i, tmp);
1645 /* The entire statement may be replaced in this case. */
1647 changed |= fold_gimple_call (gsi);
1651 /* Fold *& in asm operands. */
1652 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1654 tree link = gimple_asm_output_op (stmt, i);
1655 tree op = TREE_VALUE (link);
1656 if (REFERENCE_CLASS_P (op)
1657 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1659 TREE_VALUE (link) = op;
1663 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1665 tree link = gimple_asm_input_op (stmt, i);
1666 tree op = TREE_VALUE (link);
1667 if (REFERENCE_CLASS_P (op)
1668 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
1670 TREE_VALUE (link) = op;
/* Re-fetch STMT: folding the call above may have replaced the
   statement the iterator points at.  */
1679 stmt = gsi_stmt (*gsi);
1681 /* Fold *& on the lhs. */
1682 if (gimple_has_lhs (stmt))
1684 tree lhs = gimple_get_lhs (stmt);
1685 if (lhs && REFERENCE_CLASS_P (lhs))
1687 tree new_lhs = maybe_fold_reference (lhs, true);
1690 gimple_set_lhs (stmt, new_lhs);
1699 /* Fold the statement pointed to by GSI. In some cases, this function may
1700 replace the whole statement with a new one. Returns true iff folding
1702 The statement pointed to by GSI should be in valid gimple form but may
1703 be in unfolded state as resulting from for example constant propagation
1704 which can produce *&x = 0. */
1707 fold_stmt (gimple_stmt_iterator *gsi)
/* Delegate to the worker with INPLACE false: the statement may be
   replaced wholesale.  */
1709 return fold_stmt_1 (gsi, false);
1712 /* Perform the minimal folding on statement STMT. Only operations like
1713 *&x created by constant propagation are handled. The statement cannot
1714 be replaced with a new one. Return true if the statement was
1715 changed, false otherwise.
1716 The statement STMT should be in valid gimple form but may
1717 be in unfolded state as resulting from for example constant propagation
1718 which can produce *&x = 0. */
1721 fold_stmt_inplace (gimple stmt)
1723 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1724 bool changed = fold_stmt_1 (&gsi, true);
1725 gcc_assert (gsi_stmt (gsi) == stmt);