1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "basic-block.h"
29 #include "tree-pretty-print.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-dump.h"
33 #include "langhooks.h"
38 /* This pass propagates the RHS of assignment statements into use
39 sites of the LHS of the assignment. It's basically a specialized
40 form of tree combination. It is hoped all of this can disappear
41 when we have a generalized tree combiner.
43 One class of common cases we handle is forward propagating a single use
44 variable into a COND_EXPR.
48 if (x) goto ... else goto ...
50 Will be transformed into:
53 if (a COND b) goto ... else goto ...
55 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
57 Or (assuming c1 and c2 are constants):
61 if (x EQ/NEQ c2) goto ... else goto ...
63 Will be transformed into:
66 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
68 Similarly for x = a - c1.
74 if (x) goto ... else goto ...
76 Will be transformed into:
79 if (a == 0) goto ... else goto ...
81 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
82 For these cases, we propagate A into all, possibly more than one,
83 COND_EXPRs that use X.
89 if (x) goto ... else goto ...
91 Will be transformed into:
94 if (a != 0) goto ... else goto ...
96 (Assuming a is an integral type and x is a boolean or x is an
97 integral and a is a boolean.)
99 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
100 For these cases, we propagate A into all, possibly more than one,
101 COND_EXPRs that use X.
103 In addition to eliminating the variable and the statement which assigns
104 a value to the variable, we may be able to later thread the jump without
105 adding insane complexity in the dominator optimizer.
107 Also note these transformations can cascade. We handle this by having
108 a worklist of COND_EXPR statements to examine. As we make a change to
109 a statement, we put it back on the worklist to examine on the next
110 iteration of the main loop.
112 A second class of propagation opportunities arises for ADDR_EXPR
123 ptr = (type1*)&type2var;
126 Will get turned into (if type1 and type2 are the same size
127 and neither have volatile on them):
128 res = VIEW_CONVERT_EXPR<type1>(type2var)
133 ptr2 = ptr + <constant>;
137 ptr2 = &x[constant/elementsize];
142 offset = index * element_size;
143 offset_p = (pointer) offset;
144 ptr2 = ptr + offset_p
146 Will get turned into:
154 Provided that decl has known alignment >= 2, will get turned into
158 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
159 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
162 This will (of course) be extended as other needs arise. */
164 static bool forward_propagate_addr_expr (tree name, tree rhs);
166 /* Set to true if we delete EH edges during the optimization. */
167 static bool cfg_changed;
169 static tree rhs_to_tree (tree type, gimple stmt);
171 /* Get the next statement we can propagate NAME's value into skipping
172 trivial copies. Returns the statement that is suitable as a
173 propagation destination or NULL_TREE if there is no such one.
174 This only returns destinations in a single-use chain. FINAL_NAME_P
175 if non-NULL is written to the ssa name that represents the use. */
178 get_prop_dest_stmt (tree name, tree *final_name_p)
184 /* If name has multiple uses, bail out. */
185 if (!single_imm_use (name, &use, &use_stmt))
188 /* If this is not a trivial copy, we found it. */
189 if (!gimple_assign_ssa_name_copy_p (use_stmt)
190 || gimple_assign_rhs1 (use_stmt) != name)
193 /* Continue searching uses of the copy destination. */
194 name = gimple_assign_lhs (use_stmt);
198 *final_name_p = name;
203 /* Get the statement we can propagate from into NAME skipping
204 trivial copies. Returns the statement which defines the
205 propagation source or NULL_TREE if there is no such one.
206 If SINGLE_USE_ONLY is set considers only sources which have
207 a single use chain up to NAME. If SINGLE_USE_P is non-null,
208 it is set to whether the chain to NAME is a single use chain
209 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
212 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
214 bool single_use = true;
217 gimple def_stmt = SSA_NAME_DEF_STMT (name);
219 if (!has_single_use (name))
226 /* If name is defined by a PHI node or is the default def, bail out. */
227 if (!is_gimple_assign (def_stmt))
230 /* If def_stmt is not a simple copy, we possibly found it. */
231 if (!gimple_assign_ssa_name_copy_p (def_stmt))
235 if (!single_use_only && single_use_p)
236 *single_use_p = single_use;
238 /* We can look through pointer conversions in the search
239 for a useful stmt for the comparison folding. */
240 rhs = gimple_assign_rhs1 (def_stmt);
241 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
242 && TREE_CODE (rhs) == SSA_NAME
243 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
244 && POINTER_TYPE_P (TREE_TYPE (rhs)))
251 /* Continue searching the def of the copy source name. */
252 name = gimple_assign_rhs1 (def_stmt);
257 /* Checks if the destination ssa name in DEF_STMT can be used as
258 propagation source. Returns true if so, otherwise false. */
261 can_propagate_from (gimple def_stmt)
266 gcc_assert (is_gimple_assign (def_stmt));
268 /* If the rhs has side-effects we cannot propagate from it. */
269 if (gimple_has_volatile_ops (def_stmt))
272 /* If the rhs is a load we cannot propagate from it. */
273 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
274 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
277 /* Constants can be always propagated. */
278 if (gimple_assign_single_p (def_stmt)
279 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
282 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
283 FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_USE)
284 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
287 /* If the definition is a conversion of a pointer to a function type,
288 then we can not apply optimizations as some targets require
289 function pointers to be canonicalized and in this case this
290 optimization could eliminate a necessary canonicalization. */
291 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
293 tree rhs = gimple_assign_rhs1 (def_stmt);
294 if (POINTER_TYPE_P (TREE_TYPE (rhs))
295 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
302 /* Remove a copy chain ending in NAME along the defs but not
303 further or including UP_TO_STMT. If NAME was replaced in
304 its only use then this function can be used to clean up
305 dead stmts. Returns true if UP_TO_STMT can be removed
306 as well, otherwise false. */
309 remove_prop_source_from_use (tree name, gimple up_to_stmt)
311 gimple_stmt_iterator gsi;
315 if (!has_zero_uses (name))
318 stmt = SSA_NAME_DEF_STMT (name);
319 if (stmt == up_to_stmt)
322 gsi = gsi_for_stmt (stmt);
324 gsi_remove (&gsi, true);
326 name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL;
327 } while (name && TREE_CODE (name) == SSA_NAME);
332 /* Return the rhs of a gimple_assign STMT in a form of a single tree,
333 converted to type TYPE.
335 This should disappear, but is needed so we can combine expressions and use
336 the fold() interfaces. Long term, we need to develop folding and combine
337 routines that deal with gimple exclusively . */
340 rhs_to_tree (tree type, gimple stmt)
342 location_t loc = gimple_location (stmt);
343 enum tree_code code = gimple_assign_rhs_code (stmt);
344 if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
345 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
346 gimple_assign_rhs2 (stmt));
347 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
348 return build1 (code, type, gimple_assign_rhs1 (stmt));
349 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
350 return gimple_assign_rhs1 (stmt);
355 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
356 the folded result in a form suitable for COND_EXPR_COND or
357 NULL_TREE, if there is no suitable simplified form. If
358 INVARIANT_ONLY is true only gimple_min_invariant results are
359 considered simplified. */
362 combine_cond_expr_cond (location_t loc, enum tree_code code, tree type,
363 tree op0, tree op1, bool invariant_only)
367 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
369 t = fold_binary_loc (loc, code, type, op0, op1);
373 /* Require that we got a boolean type out if we put one in. */
374 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
376 /* Canonicalize the combined condition for use in a COND_EXPR. */
377 t = canonicalize_cond_expr_cond (t);
379 /* Bail out if we required an invariant but didn't get one. */
380 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
386 /* Propagate from the ssa name definition statements of COND_EXPR
387 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
388 Returns zero if no statement was changed, one if there were
389 changes and two if cfg_cleanup needs to run.
391 This must be kept in sync with forward_propagate_into_cond. */
394 forward_propagate_into_gimple_cond (gimple stmt)
396 int did_something = 0;
397 location_t loc = gimple_location (stmt);
400 tree tmp = NULL_TREE;
401 tree name = NULL_TREE, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
403 bool single_use0_p = false, single_use1_p = false;
404 enum tree_code code = gimple_cond_code (stmt);
406 /* We can do tree combining on SSA_NAME and comparison expressions. */
407 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison)
409 /* For comparisons use the first operand, that is likely to
410 simplify comparisons against constants. */
411 if (TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
413 name = gimple_cond_lhs (stmt);
414 def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
415 if (def_stmt && can_propagate_from (def_stmt))
417 tree op1 = gimple_cond_rhs (stmt);
418 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
419 tmp = combine_cond_expr_cond (loc, code, boolean_type_node,
420 rhs0, op1, !single_use0_p);
423 /* If that wasn't successful, try the second operand. */
425 && TREE_CODE (gimple_cond_rhs (stmt)) == SSA_NAME)
427 tree op0 = gimple_cond_lhs (stmt);
428 name = gimple_cond_rhs (stmt);
429 def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
430 if (!def_stmt || !can_propagate_from (def_stmt))
431 return did_something;
433 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
434 tmp = combine_cond_expr_cond (loc, code, boolean_type_node, op0,
435 rhs1, !single_use1_p);
437 /* If that wasn't successful either, try both operands. */
440 && rhs1 != NULL_TREE)
441 tmp = combine_cond_expr_cond (loc, code, boolean_type_node, rhs0,
442 fold_convert_loc (loc,
445 !(single_use0_p && single_use1_p));
450 if (dump_file && tmp)
452 tree cond = build2 (gimple_cond_code (stmt),
454 gimple_cond_lhs (stmt),
455 gimple_cond_rhs (stmt));
456 fprintf (dump_file, " Replaced '");
457 print_generic_expr (dump_file, cond, 0);
458 fprintf (dump_file, "' with '");
459 print_generic_expr (dump_file, tmp, 0);
460 fprintf (dump_file, "'\n");
463 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
466 /* Remove defining statements. */
467 remove_prop_source_from_use (name, NULL);
469 if (is_gimple_min_invariant (tmp))
471 else if (did_something == 0)
474 /* Continue combining. */
481 return did_something;
485 /* Propagate from the ssa name definition statements of COND_EXPR
486 in the rhs of statement STMT into the conditional if that simplifies it.
487 Returns zero if no statement was changed, one if there were
488 changes and two if cfg_cleanup needs to run.
490 This must be kept in sync with forward_propagate_into_gimple_cond. */
493 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
495 gimple stmt = gsi_stmt (*gsi_p);
496 location_t loc = gimple_location (stmt);
497 int did_something = 0;
500 tree tmp = NULL_TREE;
501 tree cond = gimple_assign_rhs1 (stmt);
502 tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
504 bool single_use0_p = false, single_use1_p = false;
506 /* We can do tree combining on SSA_NAME and comparison expressions. */
507 if (COMPARISON_CLASS_P (cond)
508 && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME)
510 /* For comparisons use the first operand, that is likely to
511 simplify comparisons against constants. */
512 name = TREE_OPERAND (cond, 0);
513 def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
514 if (def_stmt && can_propagate_from (def_stmt))
516 tree op1 = TREE_OPERAND (cond, 1);
517 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
518 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
520 rhs0, op1, !single_use0_p);
522 /* If that wasn't successful, try the second operand. */
524 && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME)
526 tree op0 = TREE_OPERAND (cond, 0);
527 name = TREE_OPERAND (cond, 1);
528 def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
529 if (!def_stmt || !can_propagate_from (def_stmt))
530 return did_something;
532 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
533 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
535 op0, rhs1, !single_use1_p);
537 /* If that wasn't successful either, try both operands. */
540 && rhs1 != NULL_TREE)
541 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
544 fold_convert_loc (loc,
547 !(single_use0_p && single_use1_p));
549 else if (TREE_CODE (cond) == SSA_NAME)
552 def_stmt = get_prop_source_stmt (name, true, NULL);
553 if (def_stmt || !can_propagate_from (def_stmt))
554 return did_something;
556 rhs0 = gimple_assign_rhs1 (def_stmt);
557 tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
558 build_int_cst (TREE_TYPE (rhs0), 0),
564 if (dump_file && tmp)
566 fprintf (dump_file, " Replaced '");
567 print_generic_expr (dump_file, cond, 0);
568 fprintf (dump_file, "' with '");
569 print_generic_expr (dump_file, tmp, 0);
570 fprintf (dump_file, "'\n");
573 gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
574 stmt = gsi_stmt (*gsi_p);
577 /* Remove defining statements. */
578 remove_prop_source_from_use (name, NULL);
580 if (is_gimple_min_invariant (tmp))
582 else if (did_something == 0)
585 /* Continue combining. */
592 return did_something;
595 /* We've just substituted an ADDR_EXPR into stmt. Update all the
596 relevant data structures to match. */
599 tidy_after_forward_propagate_addr (gimple stmt)
601 /* We may have turned a trapping insn into a non-trapping insn. */
602 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
603 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
606 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
607 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
610 /* DEF_RHS contains the address of the 0th element in an array.
611 USE_STMT uses type of DEF_RHS to compute the address of an
612 arbitrary element within the array. The (variable) byte offset
613 of the element is contained in OFFSET.
615 We walk back through the use-def chains of OFFSET to verify that
616 it is indeed computing the offset of an element within the array
617 and extract the index corresponding to the given byte offset.
619 We then try to fold the entire address expression into a form
622 If we are successful, we replace the right hand side of USE_STMT
623 with the new address computation. */
626 forward_propagate_addr_into_variable_array_index (tree offset,
628 gimple_stmt_iterator *use_stmt_gsi)
631 gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
634 if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
635 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
636 else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE)
637 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))));
640 if (!host_integerp (tunit, 1))
643 /* Get the offset's defining statement. */
644 offset_def = SSA_NAME_DEF_STMT (offset);
646 /* Try to find an expression for a proper index. This is either a
647 multiplication expression by the element size or just the ssa name we came
648 along in case the element size is one. In that case, however, we do not
649 allow multiplications because they can be computing index to a higher
650 level dimension (PR 37861). */
651 if (integer_onep (tunit))
653 if (is_gimple_assign (offset_def)
654 && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
661 /* The statement which defines OFFSET before type conversion
662 must be a simple GIMPLE_ASSIGN. */
663 if (!is_gimple_assign (offset_def))
666 /* The RHS of the statement which defines OFFSET must be a
667 multiplication of an object by the size of the array elements.
668 This implicitly verifies that the size of the array elements
670 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
671 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
672 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
674 /* The first operand to the MULT_EXPR is the desired index. */
675 index = gimple_assign_rhs1 (offset_def);
677 /* If we have idx * tunit + CST * tunit re-associate that. */
678 else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
679 || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
680 && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
681 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
682 && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
683 gimple_assign_rhs2 (offset_def),
684 tunit)) != NULL_TREE)
686 gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
687 if (is_gimple_assign (offset_def2)
688 && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
689 && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
690 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
692 index = fold_build2 (gimple_assign_rhs_code (offset_def),
694 gimple_assign_rhs1 (offset_def2), tmp);
703 /* Replace the pointer addition with array indexing. */
704 index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
705 true, GSI_SAME_STMT);
706 if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
708 new_rhs = unshare_expr (def_rhs);
709 TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index;
713 new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))),
714 unshare_expr (TREE_OPERAND (def_rhs, 0)),
715 index, integer_zero_node, NULL_TREE);
716 new_rhs = build_fold_addr_expr (new_rhs);
717 if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)),
718 TREE_TYPE (new_rhs)))
720 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true,
721 NULL_TREE, true, GSI_SAME_STMT);
722 new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)),
726 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
727 use_stmt = gsi_stmt (*use_stmt_gsi);
729 /* That should have created gimple, so there is no need to
730 record information to undo the propagation. */
731 fold_stmt_inplace (use_stmt);
732 tidy_after_forward_propagate_addr (use_stmt);
736 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
737 ADDR_EXPR <whatever>.
739 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
740 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
741 node or for recovery of array indexing from pointer arithmetic.
743 Return true if the propagation was successful (the propagation can
744 be not totally successful, yet things may have been changed). */
747 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
748 gimple_stmt_iterator *use_stmt_gsi,
751 tree lhs, rhs, rhs2, array_ref;
752 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
753 enum tree_code rhs_code;
756 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
758 lhs = gimple_assign_lhs (use_stmt);
759 rhs_code = gimple_assign_rhs_code (use_stmt);
760 rhs = gimple_assign_rhs1 (use_stmt);
762 /* Trivial cases. The use statement could be a trivial copy or a
763 useless conversion. Recurse to the uses of the lhs as copyprop does
764 not copy through different variant pointers and FRE does not catch
765 all useless conversions. Treat the case of a single-use name and
766 a conversion to def_rhs type separate, though. */
767 if (TREE_CODE (lhs) == SSA_NAME
768 && ((rhs_code == SSA_NAME && rhs == name)
769 || CONVERT_EXPR_CODE_P (rhs_code)))
771 /* Only recurse if we don't deal with a single use or we cannot
772 do the propagation to the current statement. In particular
773 we can end up with a conversion needed for a non-invariant
774 address which we cannot do in a single statement. */
776 || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
777 && (!is_gimple_min_invariant (def_rhs)
778 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
779 && POINTER_TYPE_P (TREE_TYPE (def_rhs))
780 && (TYPE_PRECISION (TREE_TYPE (lhs))
781 > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
782 return forward_propagate_addr_expr (lhs, def_rhs);
784 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
785 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
786 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
788 gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
792 /* Propagate through constant pointer adjustments. */
793 if (TREE_CODE (lhs) == SSA_NAME
794 && rhs_code == POINTER_PLUS_EXPR
796 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
799 /* As we come here with non-invariant addresses in def_rhs we need
800 to make sure we can build a valid constant offsetted address
801 for further propagation. Simply rely on fold building that
802 and check after the fact. */
803 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
805 fold_convert (ptr_type_node,
806 gimple_assign_rhs2 (use_stmt)));
807 if (TREE_CODE (new_def_rhs) == MEM_REF
808 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
810 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
813 /* Recurse. If we could propagate into all uses of lhs do not
814 bother to replace into the current use but just pretend we did. */
815 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
816 && forward_propagate_addr_expr (lhs, new_def_rhs))
819 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
820 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
821 new_def_rhs, NULL_TREE);
822 else if (is_gimple_min_invariant (new_def_rhs))
823 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
824 new_def_rhs, NULL_TREE);
827 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
828 update_stmt (use_stmt);
832 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
833 ADDR_EXPR will not appear on the LHS. */
834 lhs = gimple_assign_lhs (use_stmt);
835 while (handled_component_p (lhs))
836 lhs = TREE_OPERAND (lhs, 0);
838 /* Now see if the LHS node is a MEM_REF using NAME. If so,
839 propagate the ADDR_EXPR into the use of NAME and fold the result. */
840 if (TREE_CODE (lhs) == MEM_REF
841 && TREE_OPERAND (lhs, 0) == name)
844 HOST_WIDE_INT def_rhs_offset;
845 /* If the address is invariant we can always fold it. */
846 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
849 double_int off = mem_ref_offset (lhs);
851 off = double_int_add (off,
852 shwi_to_double_int (def_rhs_offset));
853 if (TREE_CODE (def_rhs_base) == MEM_REF)
855 off = double_int_add (off, mem_ref_offset (def_rhs_base));
856 new_ptr = TREE_OPERAND (def_rhs_base, 0);
859 new_ptr = build_fold_addr_expr (def_rhs_base);
860 TREE_OPERAND (lhs, 0) = new_ptr;
861 TREE_OPERAND (lhs, 1)
862 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
863 tidy_after_forward_propagate_addr (use_stmt);
864 /* Continue propagating into the RHS if this was not the only use. */
868 /* If the LHS is a plain dereference and the value type is the same as
869 that of the pointed-to type of the address we can put the
870 dereferenced address on the LHS preserving the original alias-type. */
871 else if (gimple_assign_lhs (use_stmt) == lhs
872 && useless_type_conversion_p
873 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
874 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
876 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
877 tree new_offset, new_base, saved;
878 while (handled_component_p (*def_rhs_basep))
879 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
880 saved = *def_rhs_basep;
881 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
883 new_base = TREE_OPERAND (*def_rhs_basep, 0);
885 = int_const_binop (PLUS_EXPR, TREE_OPERAND (lhs, 1),
886 TREE_OPERAND (*def_rhs_basep, 1), 0);
890 new_base = build_fold_addr_expr (*def_rhs_basep);
891 new_offset = TREE_OPERAND (lhs, 1);
893 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
894 new_base, new_offset);
895 gimple_assign_set_lhs (use_stmt,
896 unshare_expr (TREE_OPERAND (def_rhs, 0)));
897 *def_rhs_basep = saved;
898 tidy_after_forward_propagate_addr (use_stmt);
899 /* Continue propagating into the RHS if this was not the
905 /* We can have a struct assignment dereferencing our name twice.
906 Note that we didn't propagate into the lhs to not falsely
907 claim we did when propagating into the rhs. */
911 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
912 nodes from the RHS. */
913 rhs = gimple_assign_rhs1 (use_stmt);
914 if (TREE_CODE (rhs) == ADDR_EXPR)
915 rhs = TREE_OPERAND (rhs, 0);
916 while (handled_component_p (rhs))
917 rhs = TREE_OPERAND (rhs, 0);
919 /* Now see if the RHS node is a MEM_REF using NAME. If so,
920 propagate the ADDR_EXPR into the use of NAME and fold the result. */
921 if (TREE_CODE (rhs) == MEM_REF
922 && TREE_OPERAND (rhs, 0) == name)
925 HOST_WIDE_INT def_rhs_offset;
926 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
929 double_int off = mem_ref_offset (rhs);
931 off = double_int_add (off,
932 shwi_to_double_int (def_rhs_offset));
933 if (TREE_CODE (def_rhs_base) == MEM_REF)
935 off = double_int_add (off, mem_ref_offset (def_rhs_base));
936 new_ptr = TREE_OPERAND (def_rhs_base, 0);
939 new_ptr = build_fold_addr_expr (def_rhs_base);
940 TREE_OPERAND (rhs, 0) = new_ptr;
941 TREE_OPERAND (rhs, 1)
942 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
943 fold_stmt_inplace (use_stmt);
944 tidy_after_forward_propagate_addr (use_stmt);
947 /* If the LHS is a plain dereference and the value type is the same as
948 that of the pointed-to type of the address we can put the
949 dereferenced address on the LHS preserving the original alias-type. */
950 else if (gimple_assign_rhs1 (use_stmt) == rhs
951 && useless_type_conversion_p
952 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
953 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
955 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
956 tree new_offset, new_base, saved;
957 while (handled_component_p (*def_rhs_basep))
958 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
959 saved = *def_rhs_basep;
960 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
962 new_base = TREE_OPERAND (*def_rhs_basep, 0);
964 = int_const_binop (PLUS_EXPR, TREE_OPERAND (rhs, 1),
965 TREE_OPERAND (*def_rhs_basep, 1), 0);
969 new_base = build_fold_addr_expr (*def_rhs_basep);
970 new_offset = TREE_OPERAND (rhs, 1);
972 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
973 new_base, new_offset);
974 gimple_assign_set_rhs1 (use_stmt,
975 unshare_expr (TREE_OPERAND (def_rhs, 0)));
976 *def_rhs_basep = saved;
977 fold_stmt_inplace (use_stmt);
978 tidy_after_forward_propagate_addr (use_stmt);
983 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
985 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
986 || gimple_assign_rhs1 (use_stmt) != name)
989 /* The remaining cases are all for turning pointer arithmetic into
990 array indexing. They only apply when we have the address of
991 element zero in an array. If that is not the case then there
993 array_ref = TREE_OPERAND (def_rhs, 0);
994 if ((TREE_CODE (array_ref) != ARRAY_REF
995 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
996 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
997 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
1000 rhs2 = gimple_assign_rhs2 (use_stmt);
1001 /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
1002 of the elements in X into &x[C1 + C2/element size]. */
1003 if (TREE_CODE (rhs2) == INTEGER_CST)
1005 tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
1006 TREE_TYPE (def_rhs),
1010 tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
1011 new_rhs = unshare_expr (new_rhs);
1012 if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
1014 if (!is_gimple_min_invariant (new_rhs))
1015 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
1017 true, GSI_SAME_STMT);
1018 new_rhs = fold_convert (type, new_rhs);
1020 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
1021 use_stmt = gsi_stmt (*use_stmt_gsi);
1022 update_stmt (use_stmt);
1023 tidy_after_forward_propagate_addr (use_stmt);
1028 /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
1029 converting a multiplication of an index by the size of the
1030 array elements, then the result is converted into the proper
1031 type for the arithmetic. */
1032 if (TREE_CODE (rhs2) == SSA_NAME
1033 && (TREE_CODE (array_ref) != ARRAY_REF
1034 || integer_zerop (TREE_OPERAND (array_ref, 1)))
1035 && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
1036 /* Avoid problems with IVopts creating PLUS_EXPRs with a
1037 different type than their operands. */
1038 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
1039 return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
/* NOTE(review): gaps in the embedded line numbers indicate this excerpt is
   elided; the comments below describe only the statements visible here.  */
1044 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
1046 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
1047 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
1048 node or for recovery of array indexing from pointer arithmetic.
1049 Returns true, if all uses have been propagated into. */
1052 forward_propagate_addr_expr (tree name, tree rhs)
/* Loop depth of NAME's definition; used below to avoid propagating a
   non-invariant address into a more deeply nested loop.  */
1054 int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
1055 imm_use_iterator iter;
/* Whether NAME has exactly one use; presumably forwarded to the worker
   so it can be more aggressive — TODO confirm against the elided call.  */
1058 bool single_use_p = has_single_use (name);
/* Visit every immediate use of NAME and try to substitute RHS there.  */
1060 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
1065 /* If the use is not in a simple assignment statement, then
1066 there is nothing we can do. */
1067 if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
1069 if (!is_gimple_debug (use_stmt))
1074 /* If the use is in a deeper loop nest, then we do not want
1075 to propagate non-invariant ADDR_EXPRs into the loop as that
1076 is likely adding expression evaluations into the loop. */
1077 if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
1078 && !is_gimple_min_invariant (rhs))
1085 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
/* Delegate the single-statement propagation to the _1 worker.  */
1086 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
1088 /* If the use has moved to a different statement adjust
1089 the update machinery for the old statement too. */
1090 if (use_stmt != gsi_stmt (gsi))
1092 update_stmt (use_stmt);
1093 use_stmt = gsi_stmt (gsi);
1096 update_stmt (use_stmt);
1100 /* Remove intermediate now unused copy and conversion chains. */
1101 use_rhs = gimple_assign_rhs1 (use_stmt);
1103 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
1104 && TREE_CODE (use_rhs) == SSA_NAME
1105 && has_zero_uses (gimple_assign_lhs (use_stmt)))
1107 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
/* Release the now-dead definition before removing its statement.  */
1108 release_defs (use_stmt);
1109 gsi_remove (&gsi, true);
/* NOTE(review): gaps in the embedded line numbers indicate this excerpt is
   elided; the comments below describe only the statements visible here.  */
1116 /* Forward propagate the comparison defined in STMT like
1117 cond_1 = x CMP y to uses of the form
1121 Returns true if stmt is now unused. */
1124 forward_propagate_comparison (gimple stmt)
1126 tree name = gimple_assign_lhs (stmt);
/* TMP holds the combined/replacement expression once a pattern matches.  */
1128 tree tmp = NULL_TREE;
1130 /* Don't propagate ssa names that occur in abnormal phis. */
1131 if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1132 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
1133 || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
1134 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
1137 /* Do not un-cse comparisons. But propagate through copies. */
/* get_prop_dest_stmt also updates NAME to the copy's final destination.  */
1138 use_stmt = get_prop_dest_stmt (name, &name);
1142 /* Conversion of the condition result to another integral type. */
1143 if (is_gimple_assign (use_stmt)
1144 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt))
1145 || TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
1147 || gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
1148 && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (use_stmt))))
1150 tree lhs = gimple_assign_lhs (use_stmt);
1152 /* We can propagate the condition into a conversion. */
1153 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt)))
1155 /* Avoid using fold here as that may create a COND_EXPR with
1156 non-boolean condition as canonical form. */
1157 tmp = build2 (gimple_assign_rhs_code (stmt), TREE_TYPE (lhs),
1158 gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
1160 /* We can propagate the condition into X op CST where op
1161 is EQ_EXPR or NE_EXPR and CST is either one or zero. */
1162 else if (TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
1164 && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
1165 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
1167 enum tree_code code = gimple_assign_rhs_code (use_stmt);
1168 tree cst = gimple_assign_rhs2 (use_stmt);
/* Rebuild the comparison so it can be combined with the use below.  */
1171 cond = build2 (gimple_assign_rhs_code (stmt),
1173 gimple_assign_rhs1 (stmt),
1174 gimple_assign_rhs2 (stmt));
1176 tmp = combine_cond_expr_cond (gimple_location (use_stmt),
1177 code, TREE_TYPE (lhs),
/* Combination may fail; bail out in that case.  */
1179 if (tmp == NULL_TREE)
1182 /* We can propagate the condition into a statement that
1183 computes the logical negation of the comparison result. */
1184 else if (gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
1186 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
/* Inverting a float comparison is only valid if NaNs are respected.  */
1187 bool nans = HONOR_NANS (TYPE_MODE (type));
1188 enum tree_code code;
1189 code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
/* ERROR_MARK means the comparison has no valid inverse here.  */
1190 if (code == ERROR_MARK)
1193 tmp = build2 (code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
1194 gimple_assign_rhs2 (stmt));
/* Install the combined expression as the use statement's new RHS.  */
1200 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1201 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
1202 use_stmt = gsi_stmt (gsi);
1203 update_stmt (use_stmt);
1206 /* Remove defining statements. */
1207 remove_prop_source_from_use (name, stmt);
/* Optional dump-file reporting of the replacement that was made.  */
1209 if (dump_file && (dump_flags & TDF_DETAILS))
1211 tree old_rhs = rhs_to_tree (TREE_TYPE (gimple_assign_lhs (stmt)),
1213 fprintf (dump_file, " Replaced '");
1214 print_generic_expr (dump_file, old_rhs, dump_flags);
1215 fprintf (dump_file, "' with '");
1216 print_generic_expr (dump_file, tmp, dump_flags);
1217 fprintf (dump_file, "'\n");
/* NOTE(review): gaps in the embedded line numbers indicate this excerpt is
   elided; the comments below describe only the statements visible here.  */
1226 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1227 If so, we can change STMT into lhs = y which can later be copy
1228 propagated. Similarly for negation.
1230 This could trivially be formulated as a forward propagation
1231 to immediate uses. However, we already had an implementation
1232 from DOM which used backward propagation via the use-def links.
1234 It turns out that backward propagation is actually faster as
1235 there's less work to do for each NOT/NEG expression we find.
1236 Backwards propagation needs to look at the statement in a single
1237 backlink. Forward propagation needs to look at potentially more
1238 than one forward link. */
1241 simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
1243 gimple stmt = gsi_stmt (*gsi_p);
1244 tree rhs = gimple_assign_rhs1 (stmt);
/* Walk the use-def link backwards to the statement defining RHS.  */
1245 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
1247 /* See if the RHS_DEF_STMT has the same form as our statement. */
1248 if (is_gimple_assign (rhs_def_stmt)
1249 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
1251 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
1253 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1254 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1255 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
/* ~~y (or --y) collapses to y: replace our RHS with the inner operand.  */
1257 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1258 stmt = gsi_stmt (*gsi_p);
/* NOTE(review): gaps in the embedded line numbers indicate this excerpt is
   elided; the comments below describe only the statements visible here.  */
1264 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1265 the condition which we may be able to optimize better. */
1268 simplify_gimple_switch (gimple stmt)
1270 tree cond = gimple_switch_index (stmt);
1274 /* The optimization that we really care about is removing unnecessary
1275 casts. That will let us do much better in propagating the inferred
1276 constant at the switch target. */
1277 if (TREE_CODE (cond) == SSA_NAME)
1279 def_stmt = SSA_NAME_DEF_STMT (cond);
1280 if (is_gimple_assign (def_stmt))
/* Only plain conversions (NOP_EXPR) of the index are interesting.  */
1282 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
1287 def = gimple_assign_rhs1 (def_stmt);
1289 /* ??? Why was Jeff testing this? We are gimple... */
1290 gcc_checking_assert (is_gimple_val (def));
/* TO = type of the switch index, TI = type of the cast source.  */
1292 to = TREE_TYPE (cond);
1293 ti = TREE_TYPE (def);
1295 /* If we have an extension that preserves value, then we
1296 can copy the source value into the switch. */
1298 need_precision = TYPE_PRECISION (ti);
1300 if (! INTEGRAL_TYPE_P (ti))
1302 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
/* Signed source into unsigned target needs one extra bit for the sign.  */
1304 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1305 need_precision += 1;
1306 if (TYPE_PRECISION (to) < need_precision)
/* The cast is value-preserving: switch directly on the uncast value.  */
1311 gimple_switch_set_index (stmt, def);
/* NOTE(review): gaps in the embedded line numbers indicate this excerpt is
   elided; the comments below describe only the statements visible here.  */
1319 /* For pointers p2 and p1 return p2 - p1 if the
1320 difference is known and constant, otherwise return NULL. */
1323 constant_pointer_difference (tree p1, tree p2)
/* Bound on how many definition links we chase per pointer.  */
1326 #define CPD_ITERATIONS 5
/* Parallel arrays: for each pointer, the base expressions seen while
   stripping offsets, and the accumulated byte offset at each step.  */
1327 tree exps[2][CPD_ITERATIONS];
1328 tree offs[2][CPD_ITERATIONS];
/* Process p2 first (i == 0), then p1 (i == 1).  */
1331 for (i = 0; i < 2; i++)
1333 tree p = i ? p1 : p2;
1334 tree off = size_zero_node;
1336 enum tree_code code;
1338 /* For each of p1 and p2 we need to iterate at least
1339 twice, to handle ADDR_EXPR directly in p1/p2,
1340 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1341 on definition's stmt RHS. Iterate a few extra times. */
1345 if (!POINTER_TYPE_P (TREE_TYPE (p)))
1347 if (TREE_CODE (p) == ADDR_EXPR)
1349 tree q = TREE_OPERAND (p, 0);
1350 HOST_WIDE_INT offset;
/* Split &q into an invariant base plus a constant unit offset.  */
1351 tree base = get_addr_base_and_unit_offset (q, &offset);
1356 off = size_binop (PLUS_EXPR, off, size_int (offset));
1358 if (TREE_CODE (q) == MEM_REF
1359 && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
/* &MEM_REF[ssa + c]: keep chasing the SSA pointer, fold in C.  */
1361 p = TREE_OPERAND (q, 0);
1362 off = size_binop (PLUS_EXPR, off,
1363 double_int_to_tree (sizetype,
1364 mem_ref_offset (q)));
1373 if (TREE_CODE (p) != SSA_NAME)
1377 if (j == CPD_ITERATIONS)
1379 stmt = SSA_NAME_DEF_STMT (p);
1380 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1382 code = gimple_assign_rhs_code (stmt);
1383 if (code == POINTER_PLUS_EXPR)
/* Only constant pointer adjustments can be folded into OFF.  */
1385 if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1387 off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1388 p = gimple_assign_rhs1 (stmt);
1390 else if (code == ADDR_EXPR || code == NOP_EXPR)
1391 p = gimple_assign_rhs1 (stmt);
/* If the two chains share a base expression, the difference is the
   difference of the accumulated offsets.  */
1399 for (i = 0; i < cnt[0]; i++)
1400 for (j = 0; j < cnt[1]; j++)
1401 if (exps[0][i] == exps[1][j])
1402 return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
/* NOTE(review): gaps in the embedded line numbers indicate this excerpt is
   elided; the comments below describe only the statements visible here.  */
1407 /* *GSI_P is a GIMPLE_CALL to a builtin function.
1409 memcpy (p, "abcd", 4);
1410 memset (p + 4, ' ', 3);
1412 memcpy (p, "abcd ", 7);
1413 call if the latter can be stored by pieces during expansion. */
1416 simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
/* STMT2 is the current call; STMT1 is the preceding store reached
   through the virtual use chain.  */
1418 gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
1419 tree vuse = gimple_vuse (stmt2);
1422 stmt1 = SSA_NAME_DEF_STMT (vuse);
1424 switch (DECL_FUNCTION_CODE (callee2))
1426 case BUILT_IN_MEMSET:
/* Only the plain 3-argument, result-unused memset form is handled,
   and only on 8-bit-byte targets.  */
1427 if (gimple_call_num_args (stmt2) != 3
1428 || gimple_call_lhs (stmt2)
1430 || BITS_PER_UNIT != 8)
1435 tree ptr1, src1, str1, off1, len1, lhs1;
1436 tree ptr2 = gimple_call_arg (stmt2, 0);
1437 tree val2 = gimple_call_arg (stmt2, 1);
1438 tree len2 = gimple_call_arg (stmt2, 2);
1439 tree diff, vdef, new_str_cst;
1441 unsigned int ptr1_align;
1442 unsigned HOST_WIDE_INT src_len;
1444 use_operand_p use_p;
/* memset value and length must be compile-time constants.  */
1446 if (!host_integerp (val2, 0)
1447 || !host_integerp (len2, 1))
1449 if (is_gimple_call (stmt1))
1451 /* If first stmt is a call, it needs to be memcpy
1452 or mempcpy, with string literal as second argument and
1454 callee1 = gimple_call_fndecl (stmt1);
1455 if (callee1 == NULL_TREE
1456 || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1457 || gimple_call_num_args (stmt1) != 3)
1459 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1460 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1462 ptr1 = gimple_call_arg (stmt1, 0);
1463 src1 = gimple_call_arg (stmt1, 1);
1464 len1 = gimple_call_arg (stmt1, 2);
1465 lhs1 = gimple_call_lhs (stmt1);
1466 if (!host_integerp (len1, 1))
/* SRC1 must resolve to a string literal with a constant offset
   fully inside the literal's bounds.  */
1468 str1 = string_constant (src1, &off1);
1469 if (str1 == NULL_TREE)
1471 if (!host_integerp (off1, 1)
1472 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1473 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1474 - tree_low_cst (off1, 1)) > 0
1475 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1476 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1477 != TYPE_MODE (char_type_node))
1480 else if (gimple_assign_single_p (stmt1))
1482 /* Otherwise look for length 1 memcpy optimized into
/* ... a single character store: *ptr1 = 'c'.  */
1484 ptr1 = gimple_assign_lhs (stmt1);
1485 src1 = gimple_assign_rhs1 (stmt1);
1486 if (TREE_CODE (ptr1) != MEM_REF
1487 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1488 || !host_integerp (src1, 0))
/* Treat the single-char store as a length-1 copy from offset 0.  */
1490 ptr1 = build_fold_addr_expr (ptr1);
1491 callee1 = NULL_TREE;
1492 len1 = size_one_node;
1494 off1 = size_zero_node;
1500 diff = constant_pointer_difference (ptr1, ptr2);
/* For mempcpy the returned lhs is also a valid base; retry with it
   and account for the copied length.  */
1501 if (diff == NULL && lhs1 != NULL)
1503 diff = constant_pointer_difference (lhs1, ptr2);
1504 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1506 diff = size_binop (PLUS_EXPR, diff,
1507 fold_convert (sizetype, len1));
1509 /* If the difference between the second and first destination pointer
1510 is not constant, or is bigger than memcpy length, bail out. */
1512 || !host_integerp (diff, 1)
1513 || tree_int_cst_lt (len1, diff))
1516 /* Use maximum of difference plus memset length and memcpy length
1517 as the new memcpy length, if it is too big, bail out. */
1518 src_len = tree_low_cst (diff, 1);
1519 src_len += tree_low_cst (len2, 1);
1520 if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
1521 src_len = tree_low_cst (len1, 1);
1525 /* If mempcpy value is used elsewhere, bail out, as mempcpy
1526 with bigger length will return different result. */
1527 if (lhs1 != NULL_TREE
1528 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1529 && (TREE_CODE (lhs1) != SSA_NAME
1530 || !single_imm_use (lhs1, &use_p, &use_stmt)
1531 || use_stmt != stmt2))
1534 /* If anything reads memory in between memcpy and memset
1535 call, the modified memcpy call might change it. */
1536 vdef = gimple_vdef (stmt1);
1538 && (!single_imm_use (vdef, &use_p, &use_stmt)
1539 || use_stmt != stmt2))
1542 ptr1_align = get_pointer_alignment (ptr1, BIGGEST_ALIGNMENT);
1543 /* Construct the new source string literal. */
1544 src_buf = XALLOCAVEC (char, src_len + 1);
1547 TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
1548 tree_low_cst (len1, 1));
/* Single-char-store case: seed the buffer with that one byte.  */
1550 src_buf[0] = tree_low_cst (src1, 0);
/* Overlay the memset bytes at their offset within the merged buffer.  */
1551 memset (src_buf + tree_low_cst (diff, 1),
1552 tree_low_cst (val2, 1), tree_low_cst (len2, 1));
1553 src_buf[src_len] = '\0';
1554 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1555 handle embedded '\0's. */
1556 if (strlen (src_buf) != src_len)
1558 rtl_profile_for_bb (gimple_bb (stmt2));
1559 /* If the new memcpy wouldn't be emitted by storing the literal
1560 by pieces, this optimization might enlarge .rodata too much,
1561 as commonly used string literals couldn't be shared any
1563 if (!can_store_by_pieces (src_len,
1564 builtin_strncpy_read_str,
1565 src_buf, ptr1_align, false))
1568 new_str_cst = build_string_literal (src_len, src_buf);
1571 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
/* ... the now-subsumed memset (STMT2).  */
1573 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1574 gimple_call_set_lhs (stmt1, NULL_TREE);
1575 gimple_call_set_arg (stmt1, 1, new_str_cst);
1576 gimple_call_set_arg (stmt1, 2,
1577 build_int_cst (TREE_TYPE (len1), src_len));
1578 update_stmt (stmt1);
1579 unlink_stmt_vdef (stmt2);
1580 gsi_remove (gsi_p, true);
1581 release_defs (stmt2);
1582 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1583 release_ssa_name (lhs1);
1588 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1589 assignment, remove STMT1 and change memset call into
/* ... the merged memcpy call.  */
1591 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1593 gimple_call_set_fndecl (stmt2, built_in_decls [BUILT_IN_MEMCPY]);
1594 gimple_call_set_arg (stmt2, 0, ptr1);
1595 gimple_call_set_arg (stmt2, 1, new_str_cst);
1596 gimple_call_set_arg (stmt2, 2,
1597 build_int_cst (TREE_TYPE (len2), src_len));
1598 unlink_stmt_vdef (stmt1);
1599 gsi_remove (&gsi, true);
1600 release_defs (stmt1);
1601 update_stmt (stmt2);
/* NOTE(review): gaps in the embedded line numbers indicate this excerpt is
   elided; the comments below describe only the statements visible here.  */
1612 /* Run bitwise and assignments throug the folder. If the first argument is an
1613 ssa name that is itself a result of a typecast of an ADDR_EXPR to an
1614 integer, feed the ADDR_EXPR to the folder rather than the ssa name.
1618 simplify_bitwise_and (gimple_stmt_iterator *gsi, gimple stmt)
1621 tree arg1 = gimple_assign_rhs1 (stmt);
1622 tree arg2 = gimple_assign_rhs2 (stmt);
/* Only `x & CST' is worth feeding to the folder here.  */
1624 if (TREE_CODE (arg2) != INTEGER_CST)
1627 if (TREE_CODE (arg1) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (arg1))
1629 gimple def = SSA_NAME_DEF_STMT (arg1);
/* Look through an integer cast of an address, per the header comment.  */
1631 if (gimple_assign_cast_p (def)
1632 && INTEGRAL_TYPE_P (gimple_expr_type (def)))
1634 tree op = gimple_assign_rhs1 (def);
1636 if (TREE_CODE (op) == ADDR_EXPR)
1641 res = fold_binary_loc (gimple_location (stmt),
1642 BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
/* Replace only when folding produced a usable invariant.  */
1644 if (res && is_gimple_min_invariant (res))
1646 gimple_assign_set_rhs_from_tree (gsi, res);
/* NOTE(review): gaps in the embedded line numbers indicate this excerpt is
   elided; the comments below describe only the statements visible here.  */
1653 /* Perform re-associations of the plus or minus statement STMT that are
1654 always permitted. */
1657 associate_plusminus (gimple stmt)
1659 tree rhs1 = gimple_assign_rhs1 (stmt);
1660 tree rhs2 = gimple_assign_rhs2 (stmt);
1661 enum tree_code code = gimple_assign_rhs_code (stmt);
1662 gimple_stmt_iterator gsi;
1665 /* We can't reassociate at all for saturating types. */
1666 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
1669 /* First contract negates. */
1674 /* A +- (-B) -> A -+ B. */
1675 if (TREE_CODE (rhs2) == SSA_NAME)
1677 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
1678 if (is_gimple_assign (def_stmt)
1679 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR)
/* Flip PLUS<->MINUS and use the negate's operand directly.  */
1681 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
1682 gimple_assign_set_rhs_code (stmt, code);
1683 rhs2 = gimple_assign_rhs1 (def_stmt);
1684 gimple_assign_set_rhs2 (stmt, rhs2);
1685 gimple_set_modified (stmt, true);
1690 /* (-A) + B -> B - A. */
1691 if (TREE_CODE (rhs1) == SSA_NAME
1692 && code == PLUS_EXPR)
1694 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
1695 if (is_gimple_assign (def_stmt)
1696 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR)
1699 gimple_assign_set_rhs_code (stmt, code);
1701 gimple_assign_set_rhs1 (stmt, rhs1);
1702 rhs2 = gimple_assign_rhs1 (def_stmt);
1703 gimple_assign_set_rhs2 (stmt, rhs2);
1704 gimple_set_modified (stmt, true);
1711 /* We can't reassociate floating-point or fixed-point plus or minus
1712 because of saturation to +-Inf. */
1713 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
1714 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
1717 /* Second match patterns that allow contracting a plus-minus pair
1718 irrespective of overflow issues.
1720 (A +- B) - A -> +- B
1722 (CST +- A) +- CST -> CST +- A
1723 (A + CST) +- CST -> A + CST
1726 A - (A +- B) -> -+ B
1727 A +- (B +- A) -> +- B
1728 CST +- (CST +- A) -> CST +- A
1729 CST +- (A +- CST) -> CST +- A
1732 via commutating the addition and contracting operations to zero
1733 by reassociation. */
1735 gsi = gsi_for_stmt (stmt);
/* Patterns where the defining statement is on RHS1.  */
1736 if (TREE_CODE (rhs1) == SSA_NAME)
1738 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
1739 if (is_gimple_assign (def_stmt))
1741 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
1742 if (def_code == PLUS_EXPR
1743 || def_code == MINUS_EXPR)
1745 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
1746 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
1747 if (operand_equal_p (def_rhs1, rhs2, 0)
1748 && code == MINUS_EXPR)
1750 /* (A +- B) - A -> +- B. */
1751 code = ((def_code == PLUS_EXPR)
1752 ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
1755 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1756 gcc_assert (gsi_stmt (gsi) == stmt);
1757 gimple_set_modified (stmt, true);
1759 else if (operand_equal_p (def_rhs2, rhs2, 0)
1760 && code != def_code)
1762 /* (A +- B) -+ B -> A. */
1763 code = TREE_CODE (def_rhs1);
1766 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1767 gcc_assert (gsi_stmt (gsi) == stmt);
1768 gimple_set_modified (stmt, true);
1770 else if (TREE_CODE (rhs2) == INTEGER_CST
1771 && TREE_CODE (def_rhs1) == INTEGER_CST)
1773 /* (CST +- A) +- CST -> CST +- A. */
1774 tree cst = fold_binary (code, TREE_TYPE (rhs1),
/* Reject folds that overflow; TREE_OVERFLOW would taint the result.  */
1776 if (cst && !TREE_OVERFLOW (cst))
1779 gimple_assign_set_rhs_code (stmt, code);
1781 gimple_assign_set_rhs1 (stmt, rhs1);
1783 gimple_assign_set_rhs2 (stmt, rhs2);
1784 gimple_set_modified (stmt, true);
1787 else if (TREE_CODE (rhs2) == INTEGER_CST
1788 && TREE_CODE (def_rhs2) == INTEGER_CST
1789 && def_code == PLUS_EXPR)
1791 /* (A + CST) +- CST -> A + CST. */
1792 tree cst = fold_binary (code, TREE_TYPE (rhs1),
1794 if (cst && !TREE_OVERFLOW (cst))
1797 gimple_assign_set_rhs_code (stmt, code);
1799 gimple_assign_set_rhs1 (stmt, rhs1);
1801 gimple_assign_set_rhs2 (stmt, rhs2);
1802 gimple_set_modified (stmt, true);
1806 else if (def_code == BIT_NOT_EXPR
1807 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
1809 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
/* ~A + A -> -1 (all bits set) for integral types.  */
1810 if (code == PLUS_EXPR
1811 && operand_equal_p (def_rhs1, rhs2, 0))
1815 rhs1 = build_int_cst (TREE_TYPE (rhs2), -1);
1817 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1818 gcc_assert (gsi_stmt (gsi) == stmt);
1819 gimple_set_modified (stmt, true);
1821 else if (code == PLUS_EXPR
1822 && integer_onep (rhs1))
1828 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1829 gcc_assert (gsi_stmt (gsi) == stmt);
1830 gimple_set_modified (stmt, true);
/* Mirror patterns where the defining statement is on RHS2.  */
1836 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
1838 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
1839 if (is_gimple_assign (def_stmt))
1841 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
1842 if (def_code == PLUS_EXPR
1843 || def_code == MINUS_EXPR)
1845 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
1846 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
1847 if (operand_equal_p (def_rhs1, rhs1, 0)
1848 && code == MINUS_EXPR)
1850 /* A - (A +- B) -> -+ B. */
1851 code = ((def_code == PLUS_EXPR)
1852 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
1855 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1856 gcc_assert (gsi_stmt (gsi) == stmt);
1857 gimple_set_modified (stmt, true);
1859 else if (operand_equal_p (def_rhs2, rhs1, 0)
1860 && code != def_code)
1862 /* A +- (B +- A) -> +- B. */
1863 code = ((code == PLUS_EXPR)
1864 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
1867 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1868 gcc_assert (gsi_stmt (gsi) == stmt);
1869 gimple_set_modified (stmt, true);
1871 else if (TREE_CODE (rhs1) == INTEGER_CST
1872 && TREE_CODE (def_rhs1) == INTEGER_CST)
1874 /* CST +- (CST +- A) -> CST +- A. */
1875 tree cst = fold_binary (code, TREE_TYPE (rhs2),
1877 if (cst && !TREE_OVERFLOW (cst))
/* Inner op keeps its direction if outer and inner agree.  */
1879 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
1880 gimple_assign_set_rhs_code (stmt, code);
1882 gimple_assign_set_rhs1 (stmt, rhs1);
1884 gimple_assign_set_rhs2 (stmt, rhs2);
1885 gimple_set_modified (stmt, true);
1888 else if (TREE_CODE (rhs1) == INTEGER_CST
1889 && TREE_CODE (def_rhs2) == INTEGER_CST)
1891 /* CST +- (A +- CST) -> CST +- A. */
1892 tree cst = fold_binary (def_code == code
1893 ? PLUS_EXPR : MINUS_EXPR,
1896 if (cst && !TREE_OVERFLOW (cst))
1899 gimple_assign_set_rhs1 (stmt, rhs1);
1901 gimple_assign_set_rhs2 (stmt, rhs2);
1902 gimple_set_modified (stmt, true);
1906 else if (def_code == BIT_NOT_EXPR
1907 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
1909 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
/* A + ~A -> -1, mirroring the RHS1 case above.  */
1910 if (code == PLUS_EXPR
1911 && operand_equal_p (def_rhs1, rhs1, 0))
1915 rhs1 = build_int_cst (TREE_TYPE (rhs1), -1);
1917 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1918 gcc_assert (gsi_stmt (gsi) == stmt);
1919 gimple_set_modified (stmt, true);
/* Fold the rewritten statement in place if anything changed.  */
1926 if (gimple_modified_p (stmt))
1928 fold_stmt_inplace (stmt);
/* NOTE(review): gaps in the embedded line numbers indicate this excerpt is
   elided; the comments below describe only the statements visible here.  */
1933 /* Main entry point for the forward propagation optimizer. */
1936 tree_ssa_forward_propagate_single_use_vars (void)
1939 unsigned int todoflags = 0;
1941 cfg_changed = false;
1945 gimple_stmt_iterator gsi;
1947 /* Note we update GSI within the loop as necessary. */
1948 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
1950 gimple stmt = gsi_stmt (gsi);
1952 /* If this statement sets an SSA_NAME to an address,
1953 try to propagate the address into the uses of the SSA_NAME. */
1954 if (is_gimple_assign (stmt))
1956 tree lhs = gimple_assign_lhs (stmt);
1957 tree rhs = gimple_assign_rhs1 (stmt);
1959 if (TREE_CODE (lhs) != SSA_NAME)
/* Dispatch on RHS code: ADDR_EXPR (possibly behind a pointer
   conversion) goes to the address propagator.  */
1965 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR
1966 /* Handle pointer conversions on invariant addresses
1967 as well, as this is valid gimple. */
1968 || (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1969 && TREE_CODE (rhs) == ADDR_EXPR
1970 && POINTER_TYPE_P (TREE_TYPE (lhs))))
1972 tree base = get_base_address (TREE_OPERAND (rhs, 0));
1975 || decl_address_invariant_p (base))
1976 && !stmt_references_abnormal_ssa_name (stmt)
1977 && forward_propagate_addr_expr (lhs, rhs))
/* All uses were propagated into: the definition is dead.  */
1979 release_defs (stmt);
1980 todoflags |= TODO_remove_unused_locals;
1981 gsi_remove (&gsi, true);
1986 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1988 if (TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
1989 /* ??? Better adjust the interface to that function
1990 instead of building new trees here. */
1991 && forward_propagate_addr_expr
1995 fold_build2 (MEM_REF,
1996 TREE_TYPE (TREE_TYPE (rhs)),
2000 gimple_assign_rhs2 (stmt))))))
2002 release_defs (stmt);
2003 todoflags |= TODO_remove_unused_locals;
2004 gsi_remove (&gsi, true);
2006 else if (is_gimple_min_invariant (rhs))
2008 /* Make sure to fold &a[0] + off_1 here. */
2009 fold_stmt_inplace (stmt);
2011 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2017 else if ((gimple_assign_rhs_code (stmt) == BIT_NOT_EXPR
2018 || gimple_assign_rhs_code (stmt) == NEGATE_EXPR)
2019 && TREE_CODE (rhs) == SSA_NAME)
2021 simplify_not_neg_expr (&gsi);
2024 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
2026 /* In this case the entire COND_EXPR is in rhs1. */
/* Defer overflow warnings so we warn only if the transform sticks.  */
2028 fold_defer_overflow_warnings ();
2029 did_something = forward_propagate_into_cond (&gsi);
2030 stmt = gsi_stmt (gsi);
2031 if (did_something == 2)
2033 fold_undefer_overflow_warnings (!TREE_NO_WARNING (rhs)
2034 && did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
2037 else if (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
2040 if (forward_propagate_comparison (stmt))
2042 release_defs (stmt);
2043 todoflags |= TODO_remove_unused_locals;
2044 gsi_remove (&gsi, true);
2049 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
2051 simplify_bitwise_and (&gsi, stmt);
2054 else if (gimple_assign_rhs_code (stmt) == PLUS_EXPR
2055 || gimple_assign_rhs_code (stmt) == MINUS_EXPR)
2057 associate_plusminus (stmt);
/* Non-assignments: switches, conditions and builtin calls each get
   their dedicated simplifier.  */
2063 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2065 simplify_gimple_switch (stmt);
2068 else if (gimple_code (stmt) == GIMPLE_COND)
2071 fold_defer_overflow_warnings ();
2072 did_something = forward_propagate_into_gimple_cond (stmt);
2073 if (did_something == 2)
2075 fold_undefer_overflow_warnings (did_something, stmt,
2076 WARN_STRICT_OVERFLOW_CONDITIONAL);
2079 else if (is_gimple_call (stmt))
2081 tree callee = gimple_call_fndecl (stmt);
/* Only normal builtins are candidates for call simplification.  */
2082 if (callee == NULL_TREE
2083 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2084 || !simplify_builtin_call (&gsi, callee))
/* Request CFG cleanup — presumably when cfg_changed was set above;
   TODO confirm against the elided condition.  */
2093 todoflags |= TODO_cleanup_cfg;
/* Gate function for the pass: run only when -ftree-forwprop is enabled.  */
2099 gate_forwprop (void)
2101 return flag_tree_forwprop;
2104 struct gimple_opt_pass pass_forwprop =
2108 "forwprop", /* name */
2109 gate_forwprop, /* gate */
2110 tree_ssa_forward_propagate_single_use_vars, /* execute */
2113 0, /* static_pass_number */
2114 TV_TREE_FORWPROP, /* tv_id */
2115 PROP_cfg | PROP_ssa, /* properties_required */
2116 0, /* properties_provided */
2117 0, /* properties_destroyed */
2118 0, /* todo_flags_start */
2122 | TODO_verify_ssa /* todo_flags_finish */