1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
28 #include "basic-block.h"
30 #include "diagnostic.h"
31 #include "tree-flow.h"
32 #include "tree-pass.h"
33 #include "tree-dump.h"
34 #include "langhooks.h"
38 /* This pass propagates the RHS of assignment statements into use
39 sites of the LHS of the assignment. It's basically a specialized
40 form of tree combination. It is hoped all of this can disappear
41 when we have a generalized tree combiner.
43 One class of common cases we handle is forward propagating a single use
44 variable into a COND_EXPR.
48 if (x) goto ... else goto ...
50 Will be transformed into:
53 if (a COND b) goto ... else goto ...
55 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
57 Or (assuming c1 and c2 are constants):
61 if (x EQ/NEQ c2) goto ... else goto ...
63 Will be transformed into:
66 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
68 Similarly for x = a - c1.
74 if (x) goto ... else goto ...
76 Will be transformed into:
79 if (a == 0) goto ... else goto ...
81 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
82 For these cases, we propagate A into all, possibly more than one,
83 COND_EXPRs that use X.
89 if (x) goto ... else goto ...
91 Will be transformed into:
94 if (a != 0) goto ... else goto ...
96 (Assuming a is an integral type and x is a boolean or x is an
97 integral and a is a boolean.)
99 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
100 For these cases, we propagate A into all, possibly more than one,
101 COND_EXPRs that use X.
103 In addition to eliminating the variable and the statement which assigns
104 a value to the variable, we may be able to later thread the jump without
105 adding insane complexity in the dominator optimizer.
107 Also note these transformations can cascade. We handle this by having
108 a worklist of COND_EXPR statements to examine. As we make a change to
109 a statement, we put it back on the worklist to examine on the next
110 iteration of the main loop.
112 A second class of propagation opportunities arises for ADDR_EXPR
123 ptr = (type1*)&type2var;
126 Will get turned into (if type1 and type2 are the same size
127 and neither have volatile on them):
128 res = VIEW_CONVERT_EXPR<type1>(type2var)
133 ptr2 = ptr + <constant>;
137 ptr2 = &x[constant/elementsize];
142 offset = index * element_size;
143 offset_p = (pointer) offset;
144 ptr2 = ptr + offset_p
146 Will get turned into:
154 Provided that decl has known alignment >= 2, will get turned into
158 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
159 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
162 This will (of course) be extended as other needs arise. */
/* Forward declaration; the definition appears later in this file.  */
164 static bool forward_propagate_addr_expr (tree name, tree rhs);
166 /* Set to true if we delete EH edges during the optimization. */
167 static bool cfg_changed;
/* Forward declaration; rebuilds STMT's RHS as a GENERIC tree of TYPE.  */
169 static tree rhs_to_tree (tree type, gimple stmt);
171 /* Get the next statement we can propagate NAME's value into skipping
172 trivial copies. Returns the statement that is suitable as a
173 propagation destination or NULL_TREE if there is no such one.
174 This only returns destinations in a single-use chain. FINAL_NAME_P
175 if non-NULL is written to the ssa name that represents the use. */
178 get_prop_dest_stmt (tree name, tree *final_name_p)
/* NOTE(review): the loop header and return statements are not visible in
   this excerpt; the lines below walk a single-use copy chain forward.  */
184 /* If name has multiple uses, bail out. */
185 if (!single_imm_use (name, &use, &use_stmt))
188 /* If this is not a trivial copy, we found it. */
189 if (!gimple_assign_copy_p (use_stmt)
190 || TREE_CODE (gimple_assign_lhs (use_stmt)) != SSA_NAME
191 || gimple_assign_rhs1 (use_stmt) != name)
194 /* Continue searching uses of the copy destination. */
195 name = gimple_assign_lhs (use_stmt);
/* Report the ssa name at the end of the copy chain to the caller.  */
199 *final_name_p = name;
204 /* Get the statement we can propagate from into NAME skipping
205 trivial copies. Returns the statement which defines the
206 propagation source or NULL_TREE if there is no such one.
207 If SINGLE_USE_ONLY is set considers only sources which have
208 a single use chain up to NAME. If SINGLE_USE_P is non-null,
209 it is set to whether the chain to NAME is a single use chain
210 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
213 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
/* Tracks whether every link in the copy chain so far has a single use.  */
215 bool single_use = true;
218 gimple def_stmt = SSA_NAME_DEF_STMT (name);
220 if (!has_single_use (name))
227 /* If name is defined by a PHI node or is the default def, bail out. */
228 if (gimple_code (def_stmt) != GIMPLE_ASSIGN)
231 /* If name is not a simple copy destination, we found it. */
232 if (!gimple_assign_copy_p (def_stmt)
233 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) != SSA_NAME)
/* Only report single-use information when the caller asked for it and
   did not restrict the search to single-use chains.  */
237 if (!single_use_only && single_use_p)
238 *single_use_p = single_use;
240 /* We can look through pointer conversions in the search
241 for a useful stmt for the comparison folding. */
242 rhs = gimple_assign_rhs1 (def_stmt);
243 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
244 && TREE_CODE (rhs) == SSA_NAME
245 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
246 && POINTER_TYPE_P (TREE_TYPE (rhs)))
253 /* Continue searching the def of the copy source name. */
254 name = gimple_assign_rhs1 (def_stmt);
259 /* Checks if the destination ssa name in DEF_STMT can be used as
260 propagation source. Returns true if so, otherwise false. */
263 can_propagate_from (gimple def_stmt)
268 gcc_assert (is_gimple_assign (def_stmt));
269 /* If the rhs has side-effects we cannot propagate from it. */
270 if (gimple_has_volatile_ops (def_stmt))
273 /* If the rhs is a load we cannot propagate from it. */
274 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
275 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
278 /* Constants can be always propagated. */
279 if (is_gimple_min_invariant
280 (rhs_to_tree (TREE_TYPE (gimple_assign_lhs (def_stmt)), def_stmt)))
283 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
284 FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_USE)
285 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
288 /* If the definition is a conversion of a pointer to a function type,
289 then we can not apply optimizations as some targets require
290 function pointers to be canonicalized and in this case this
291 optimization could eliminate a necessary canonicalization. */
292 if (is_gimple_assign (def_stmt)
293 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))))
295 tree rhs = gimple_assign_rhs1 (def_stmt);
/* A conversion whose source is a pointer to FUNCTION_TYPE is the
   canonicalization case described above.  */
296 if (POINTER_TYPE_P (TREE_TYPE (rhs))
297 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
303 /* Remove a copy chain ending in NAME along the defs but not
304 further or including UP_TO_STMT. If NAME was replaced in
305 its only use then this function can be used to clean up
306 dead stmts. Returns true if UP_TO_STMT can be removed
307 as well, otherwise false. */
310 remove_prop_source_from_use (tree name, gimple up_to_stmt)
312 gimple_stmt_iterator gsi;
/* Walk backwards along the def chain, removing statements whose
   result has become unused.  */
316 if (!has_zero_uses (name))
319 stmt = SSA_NAME_DEF_STMT (name);
320 if (stmt == up_to_stmt)
323 gsi = gsi_for_stmt (stmt);
325 gsi_remove (&gsi, true);
/* Continue only through trivial copies; any other definition ends
   the chain.  */
327 name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL;
328 } while (name && TREE_CODE (name) == SSA_NAME);
333 /* Return the rhs of a gimple_assign STMT in a form of a single tree,
334 converted to type TYPE.
336 This should disappear, but is needed so we can combine expressions and use
337 the fold() interfaces. Long term, we need to develop folding and combine
338 routines that deal with gimple exclusively . */
341 rhs_to_tree (tree type, gimple stmt)
343 enum tree_code code = gimple_assign_rhs_code (stmt);
/* Rebuild a GENERIC tree from the flattened gimple operands, dispatching
   on the rhs class of the assignment.  */
344 if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
345 return fold_build2 (code, type, gimple_assign_rhs1 (stmt),
346 gimple_assign_rhs2 (stmt));
347 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
348 return build1 (code, type, gimple_assign_rhs1 (stmt));
/* A single-rhs assignment already is a tree; return it directly.  */
349 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
350 return gimple_assign_rhs1 (stmt);
355 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
356 the folded result in a form suitable for COND_EXPR_COND or
357 NULL_TREE, if there is no suitable simplified form. If
358 INVARIANT_ONLY is true only gimple_min_invariant results are
359 considered simplified. */
362 combine_cond_expr_cond (enum tree_code code, tree type,
363 tree op0, tree op1, bool invariant_only)
/* Only comparison codes are meaningful as COND_EXPR conditions.  */
367 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
369 t = fold_binary (code, type, op0, op1);
373 /* Require that we got a boolean type out if we put one in. */
374 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
376 /* Canonicalize the combined condition for use in a COND_EXPR. */
377 t = canonicalize_cond_expr_cond (t);
379 /* Bail out if we required an invariant but didn't get one. */
380 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
386 /* Propagate from the ssa name definition statements of COND_EXPR
387 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
388 Returns zero if no statement was changed, one if there were
389 changes and two if cfg_cleanup needs to run.
391 This must be kept in sync with forward_propagate_into_cond. */
394 forward_propagate_into_gimple_cond (gimple stmt)
396 int did_something = 0;
399 tree tmp = NULL_TREE;
400 tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
402 bool single_use0_p = false, single_use1_p = false;
403 enum tree_code code = gimple_cond_code (stmt);
405 /* We can do tree combining on SSA_NAME and comparison expressions. */
406 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison
407 && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
409 /* For comparisons use the first operand, that is likely to
410 simplify comparisons against constants. */
411 name = gimple_cond_lhs (stmt);
412 def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
413 if (def_stmt && can_propagate_from (def_stmt))
415 tree op1 = gimple_cond_rhs (stmt);
/* Substitute the def's rhs for the lhs operand and try to fold.  */
416 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
417 tmp = combine_cond_expr_cond (code, boolean_type_node, rhs0,
418 op1, !single_use0_p);
420 /* If that wasn't successful, try the second operand. */
422 && TREE_CODE (gimple_cond_rhs (stmt)) == SSA_NAME)
424 tree op0 = gimple_cond_lhs (stmt);
425 name = gimple_cond_rhs (stmt);
426 def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
427 if (!def_stmt || !can_propagate_from (def_stmt))
428 return did_something;
430 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
431 tmp = combine_cond_expr_cond (code, boolean_type_node, op0, rhs1,
434 /* If that wasn't successful either, try both operands. */
437 && rhs1 != NULL_TREE)
438 tmp = combine_cond_expr_cond (code, boolean_type_node, rhs0,
439 fold_convert (TREE_TYPE (rhs0), rhs1),
440 !(single_use0_p && single_use1_p));
/* On success, dump the replacement, rewrite the condition and clean
   up the now-dead defining statements.  */
445 if (dump_file && tmp)
447 tree cond = build2 (gimple_cond_code (stmt),
449 gimple_cond_lhs (stmt),
450 gimple_cond_rhs (stmt));
451 fprintf (dump_file, " Replaced '");
452 print_generic_expr (dump_file, cond, 0);
453 fprintf (dump_file, "' with '");
454 print_generic_expr (dump_file, tmp, 0);
455 fprintf (dump_file, "'\n");
458 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
461 /* Remove defining statements. */
462 remove_prop_source_from_use (name, NULL);
/* Per the header comment: an invariant condition means cfg_cleanup
   may be needed (return value 2), otherwise report a change (1).  */
464 if (is_gimple_min_invariant (tmp))
466 else if (did_something == 0)
469 /* Continue combining. */
476 return did_something;
480 /* Propagate from the ssa name definition statements of COND_EXPR
481 in the rhs of statement STMT into the conditional if that simplifies it.
482 Returns zero if no statement was changed, one if there were
483 changes and two if cfg_cleanup needs to run.
485 This must be kept in sync with forward_propagate_into_gimple_cond. */
488 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
490 gimple stmt = gsi_stmt (*gsi_p);
491 int did_something = 0;
494 tree tmp = NULL_TREE;
495 tree cond = gimple_assign_rhs1 (stmt);
496 tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
498 bool single_use0_p = false, single_use1_p = false;
500 /* We can do tree combining on SSA_NAME and comparison expressions. */
501 if (COMPARISON_CLASS_P (cond)
502 && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME)
504 /* For comparisons use the first operand, that is likely to
505 simplify comparisons against constants. */
506 name = TREE_OPERAND (cond, 0);
507 def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
508 if (def_stmt && can_propagate_from (def_stmt))
510 tree op1 = TREE_OPERAND (cond, 1);
511 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
512 tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
513 rhs0, op1, !single_use0_p);
515 /* If that wasn't successful, try the second operand. */
517 && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME)
519 tree op0 = TREE_OPERAND (cond, 0);
520 name = TREE_OPERAND (cond, 1);
521 def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
522 if (!def_stmt || !can_propagate_from (def_stmt))
523 return did_something;
525 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
526 tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
527 op0, rhs1, !single_use1_p);
529 /* If that wasn't successful either, try both operands. */
532 && rhs1 != NULL_TREE)
533 tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
534 rhs0, fold_convert (TREE_TYPE (rhs0),
536 !(single_use0_p && single_use1_p));
538 else if (TREE_CODE (cond) == SSA_NAME)
541 def_stmt = get_prop_source_stmt (name, true, NULL);
/* Bail out if there is no usable defining statement.  The condition
   was inverted (dropped '!' on def_stmt); it must mirror the guard on
   the second-operand path above and the one in
   forward_propagate_into_gimple_cond, otherwise we return early in
   exactly the propagatable case and dereference a NULL def_stmt in
   can_propagate_from otherwise.  */
542 if (!def_stmt || !can_propagate_from (def_stmt))
543 return did_something;
/* A bare SSA_NAME condition is folded as NAME's definition != 0.  */
545 rhs0 = gimple_assign_rhs1 (def_stmt);
546 tmp = combine_cond_expr_cond (NE_EXPR, boolean_type_node, rhs0,
547 build_int_cst (TREE_TYPE (rhs0), 0),
/* On success, dump the replacement, rewrite the rhs and clean up the
   now-dead defining statements.  */
553 if (dump_file && tmp)
555 fprintf (dump_file, " Replaced '");
556 print_generic_expr (dump_file, cond, 0);
557 fprintf (dump_file, "' with '");
558 print_generic_expr (dump_file, tmp, 0);
559 fprintf (dump_file, "'\n");
562 gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
563 stmt = gsi_stmt (*gsi_p);
566 /* Remove defining statements. */
567 remove_prop_source_from_use (name, NULL);
/* Per the header comment: invariant result => cfg_cleanup may be
   needed (2), otherwise report a simple change (1).  */
569 if (is_gimple_min_invariant (tmp))
571 else if (did_something == 0)
574 /* Continue combining. */
581 return did_something;
584 /* We've just substituted an ADDR_EXPR into stmt. Update all the
585 relevant data structures to match. */
588 tidy_after_forward_propagate_addr (gimple stmt)
590 /* We may have turned a trapping insn into a non-trapping insn. */
591 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
592 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
/* Recompute invariant flags on a remaining ADDR_EXPR operand.  */
595 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
596 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
599 /* DEF_RHS contains the address of the 0th element in an array.
600 USE_STMT uses type of DEF_RHS to compute the address of an
601 arbitrary element within the array. The (variable) byte offset
602 of the element is contained in OFFSET.
604 We walk back through the use-def chains of OFFSET to verify that
605 it is indeed computing the offset of an element within the array
606 and extract the index corresponding to the given byte offset.
608 We then try to fold the entire address expression into a form
611 If we are successful, we replace the right hand side of USE_STMT
612 with the new address computation. */
615 forward_propagate_addr_into_variable_array_index (tree offset,
617 gimple_stmt_iterator *use_stmt_gsi)
620 gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
/* The element size must be a known host integer to divide by.  */
623 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
624 if (!host_integerp (tunit, 1))
627 /* Get the offset's defining statement. */
628 offset_def = SSA_NAME_DEF_STMT (offset);
630 /* Try to find an expression for a proper index. This is either a
631 multiplication expression by the element size or just the ssa name we came
632 along in case the element size is one. In that case, however, we do not
633 allow multiplications because they can be computing index to a higher
634 level dimension (PR 37861). */
635 if (integer_onep (tunit))
637 if (is_gimple_assign (offset_def)
638 && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
645 /* The statement which defines OFFSET before type conversion
646 must be a simple GIMPLE_ASSIGN. */
647 if (!is_gimple_assign (offset_def))
650 /* The RHS of the statement which defines OFFSET must be a
651 multiplication of an object by the size of the array elements.
652 This implicitly verifies that the size of the array elements
654 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
655 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
656 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
658 /* The first operand to the MULT_EXPR is the desired index. */
659 index = gimple_assign_rhs1 (offset_def);
661 /* If we have idx * tunit + CST * tunit re-associate that. */
662 else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
663 || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
664 && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
665 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
666 && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
667 gimple_assign_rhs2 (offset_def),
668 tunit)) != NULL_TREE)
670 gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
671 if (is_gimple_assign (offset_def2)
672 && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
673 && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
674 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
/* Fold idx PLUS/MINUS (CST / tunit) as the new index.  */
676 index = fold_build2 (gimple_assign_rhs_code (offset_def),
678 gimple_assign_rhs1 (offset_def2), tmp);
687 /* Replace the pointer addition with array indexing. */
688 index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
689 true, GSI_SAME_STMT);
690 gimple_assign_set_rhs_from_tree (use_stmt_gsi, unshare_expr (def_rhs));
691 use_stmt = gsi_stmt (*use_stmt_gsi);
/* Patch the ARRAY_REF index slot of the copied &a[0] expression.  */
692 TREE_OPERAND (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0), 1)
695 /* That should have created gimple, so there is no need to
696 record information to undo the propagation. */
697 fold_stmt_inplace (use_stmt);
698 tidy_after_forward_propagate_addr (use_stmt);
702 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
703 ADDR_EXPR <whatever>.
705 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
706 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
707 node or for recovery of array indexing from pointer arithmetic.
709 Return true if the propagation was successful (the propagation can
710 be not totally successful, yet things may have been changed). */
713 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
714 gimple_stmt_iterator *use_stmt_gsi,
717 tree lhs, rhs, rhs2, array_ref;
719 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
720 enum tree_code rhs_code;
722 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
724 lhs = gimple_assign_lhs (use_stmt);
725 rhs_code = gimple_assign_rhs_code (use_stmt);
726 rhs = gimple_assign_rhs1 (use_stmt);
728 /* Trivial cases. The use statement could be a trivial copy or a
729 useless conversion. Recurse to the uses of the lhs as copyprop does
730 not copy through different variant pointers and FRE does not catch
731 all useless conversions. Treat the case of a single-use name and
732 a conversion to def_rhs type separate, though. */
733 if (TREE_CODE (lhs) == SSA_NAME
734 && ((rhs_code == SSA_NAME && rhs == name)
735 || CONVERT_EXPR_CODE_P (rhs_code)))
737 /* Only recurse if we don't deal with a single use or we cannot
738 do the propagation to the current statement. In particular
739 we can end up with a conversion needed for a non-invariant
740 address which we cannot do in a single statement. */
742 || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
743 && !is_gimple_min_invariant (def_rhs)))
744 return forward_propagate_addr_expr (lhs, def_rhs);
/* Otherwise fold the ADDR_EXPR directly into this statement; keep
   a NOP_EXPR if the types do not match uselessly.  */
746 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
747 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
748 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
750 gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
754 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
755 ADDR_EXPR will not appear on the LHS. */
756 lhsp = gimple_assign_lhs_ptr (use_stmt);
757 while (handled_component_p (*lhsp))
758 lhsp = &TREE_OPERAND (*lhsp, 0);
761 /* Now see if the LHS node is an INDIRECT_REF using NAME. If so,
762 propagate the ADDR_EXPR into the use of NAME and fold the result. */
763 if (TREE_CODE (lhs) == INDIRECT_REF
764 && TREE_OPERAND (lhs, 0) == name
765 && may_propagate_address_into_dereference (def_rhs, lhs)
766 && (lhsp != gimple_assign_lhs_ptr (use_stmt)
767 || useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
770 *lhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
771 fold_stmt_inplace (use_stmt);
772 tidy_after_forward_propagate_addr (use_stmt);
774 /* Continue propagating into the RHS if this was not the only use. */
779 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
780 nodes from the RHS. */
781 rhsp = gimple_assign_rhs1_ptr (use_stmt);
782 while (handled_component_p (*rhsp)
783 || TREE_CODE (*rhsp) == ADDR_EXPR)
784 rhsp = &TREE_OPERAND (*rhsp, 0);
787 /* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
788 propagate the ADDR_EXPR into the use of NAME and fold the result. */
789 if (TREE_CODE (rhs) == INDIRECT_REF
790 && TREE_OPERAND (rhs, 0) == name
791 && may_propagate_address_into_dereference (def_rhs, rhs))
793 *rhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
794 fold_stmt_inplace (use_stmt);
795 tidy_after_forward_propagate_addr (use_stmt);
799 /* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
800 propagate the ADDR_EXPR into the use of NAME and try to
801 create a VCE and fold the result. */
802 if (TREE_CODE (rhs) == INDIRECT_REF
803 && TREE_OPERAND (rhs, 0) == name
804 && TYPE_SIZE (TREE_TYPE (rhs))
805 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
806 /* Function decls should not be used for VCE either as it could be a
807 function descriptor that we want and not the actual function code. */
808 && TREE_CODE (TREE_OPERAND (def_rhs, 0)) != FUNCTION_DECL
809 /* We should not convert volatile loads to non volatile loads. */
810 && !TYPE_VOLATILE (TREE_TYPE (rhs))
811 && !TYPE_VOLATILE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
812 && operand_equal_p (TYPE_SIZE (TREE_TYPE (rhs)),
813 TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))), 0))
815 tree def_rhs_base, new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
816 new_rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), new_rhs);
817 if (TREE_CODE (new_rhs) != VIEW_CONVERT_EXPR)
819 /* If we have folded the VIEW_CONVERT_EXPR then the result is only
820 valid if we can replace the whole rhs of the use statement. */
821 if (rhs != gimple_assign_rhs1 (use_stmt))
823 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true, NULL,
825 gimple_assign_set_rhs1 (use_stmt, new_rhs);
826 tidy_after_forward_propagate_addr (use_stmt);
829 /* If the defining rhs comes from an indirect reference, then do not
830 convert into a VIEW_CONVERT_EXPR. */
831 def_rhs_base = TREE_OPERAND (def_rhs, 0);
832 while (handled_component_p (def_rhs_base))
833 def_rhs_base = TREE_OPERAND (def_rhs_base, 0);
834 if (!INDIRECT_REF_P (def_rhs_base))
836 /* We may have arbitrary VIEW_CONVERT_EXPRs in a nested component
837 reference. Place it there and fold the thing. */
839 fold_stmt_inplace (use_stmt);
840 tidy_after_forward_propagate_addr (use_stmt);
845 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
847 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
848 || gimple_assign_rhs1 (use_stmt) != name)
851 /* The remaining cases are all for turning pointer arithmetic into
852 array indexing. They only apply when we have the address of
853 element zero in an array. If that is not the case then there
855 array_ref = TREE_OPERAND (def_rhs, 0);
856 if (TREE_CODE (array_ref) != ARRAY_REF
857 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
858 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
861 rhs2 = gimple_assign_rhs2 (use_stmt);
862 /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
863 of the elements in X into &x[C1 + C2/element size]. */
864 if (TREE_CODE (rhs2) == INTEGER_CST)
866 tree new_rhs = maybe_fold_stmt_addition (gimple_expr_type (use_stmt),
870 gimple_assign_set_rhs_from_tree (use_stmt_gsi,
871 unshare_expr (new_rhs));
872 use_stmt = gsi_stmt (*use_stmt_gsi);
873 update_stmt (use_stmt);
874 tidy_after_forward_propagate_addr (use_stmt);
879 /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
880 converting a multiplication of an index by the size of the
881 array elements, then the result is converted into the proper
882 type for the arithmetic. */
883 if (TREE_CODE (rhs2) == SSA_NAME
884 && integer_zerop (TREE_OPERAND (array_ref, 1))
885 && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
886 /* Avoid problems with IVopts creating PLUS_EXPRs with a
887 different type than their operands. */
888 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
889 return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
894 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
896 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
897 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
898 node or for recovery of array indexing from pointer arithmetic.
899 Returns true, if all uses have been propagated into. */
902 forward_propagate_addr_expr (tree name, tree rhs)
904 int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
905 imm_use_iterator iter;
908 bool single_use_p = has_single_use (name);
/* Visit every immediate use of NAME and try propagating into it.  */
910 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
915 /* If the use is not in a simple assignment statement, then
916 there is nothing we can do. */
917 if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
923 /* If the use is in a deeper loop nest, then we do not want
924 to propagate the ADDR_EXPR into the loop as that is likely
925 adding expression evaluations into the loop. */
926 if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth)
933 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
934 push_stmt_changes (&use_stmt);
935 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
937 /* If the use has moved to a different statement adjust
938 the update machinery. */
939 if (use_stmt != gsi_stmt (gsi))
941 pop_stmt_changes (&use_stmt);
942 use_stmt = gsi_stmt (gsi);
943 update_stmt (use_stmt);
946 pop_stmt_changes (&use_stmt);
950 /* Remove intermediate now unused copy and conversion chains. */
951 use_rhs = gimple_assign_rhs1 (use_stmt);
953 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
954 && (TREE_CODE (use_rhs) == SSA_NAME
955 || (CONVERT_EXPR_P (use_rhs)
956 && TREE_CODE (TREE_OPERAND (use_rhs, 0)) == SSA_NAME)))
958 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
959 release_defs (use_stmt);
960 gsi_remove (&gsi, true);
967 /* Forward propagate the comparison defined in STMT like
968 cond_1 = x CMP y to uses of the form
972 Returns true if stmt is now unused. */
975 forward_propagate_comparison (gimple stmt)
977 tree name = gimple_assign_lhs (stmt);
979 tree tmp = NULL_TREE;
981 /* Don't propagate ssa names that occur in abnormal phis. */
982 if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
983 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
984 || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
985 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
988 /* Do not un-cse comparisons. But propagate through copies. */
989 use_stmt = get_prop_dest_stmt (name, &name);
993 /* Conversion of the condition result to another integral type. */
994 if (is_gimple_assign (use_stmt)
995 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt))
996 || TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
998 || gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
999 && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (use_stmt))))
1001 tree lhs = gimple_assign_lhs (use_stmt);
1003 /* We can propagate the condition into a conversion. */
1004 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt)))
1006 /* Avoid using fold here as that may create a COND_EXPR with
1007 non-boolean condition as canonical form. */
1008 tmp = build2 (gimple_assign_rhs_code (stmt), TREE_TYPE (lhs),
1009 gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
1011 /* We can propagate the condition into X op CST where op
1012 is EQ_EXPR or NE_EXPR and CST is either one or zero. */
1013 else if (TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
1015 && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
1016 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
1018 enum tree_code code = gimple_assign_rhs_code (use_stmt);
1019 tree cst = gimple_assign_rhs2 (use_stmt);
/* Rebuild the comparison and combine it with the EQ/NE test.  */
1022 cond = build2 (gimple_assign_rhs_code (stmt),
1024 gimple_assign_rhs1 (stmt),
1025 gimple_assign_rhs2 (stmt));
1027 tmp = combine_cond_expr_cond (code, TREE_TYPE (lhs), cond, cst, false);
1028 if (tmp == NULL_TREE)
1031 /* We can propagate the condition into a statement that
1032 computes the logical negation of the comparison result. */
1033 else if (gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
1035 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
/* Comparison inversion is unsafe for NaNs; invert_tree_comparison
   reports ERROR_MARK in that case and we give up.  */
1036 bool nans = HONOR_NANS (TYPE_MODE (type));
1037 enum tree_code code;
1038 code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
1039 if (code == ERROR_MARK)
1042 tmp = build2 (code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
1043 gimple_assign_rhs2 (stmt));
/* Install the combined expression as the new rhs of the use.  */
1049 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1050 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
1051 use_stmt = gsi_stmt (gsi);
1052 update_stmt (use_stmt);
1055 /* Remove defining statements. */
1056 remove_prop_source_from_use (name, stmt);
1058 if (dump_file && (dump_flags & TDF_DETAILS))
1060 tree old_rhs = rhs_to_tree (TREE_TYPE (gimple_assign_lhs (stmt)),
1062 fprintf (dump_file, " Replaced '");
1063 print_generic_expr (dump_file, old_rhs, dump_flags);
1064 fprintf (dump_file, "' with '");
1065 print_generic_expr (dump_file, tmp, dump_flags);
1066 fprintf (dump_file, "'\n");
1075 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1076 If so, we can change STMT into lhs = y which can later be copy
1077 propagated. Similarly for negation.
1079 This could trivially be formulated as a forward propagation
1080 to immediate uses. However, we already had an implementation
1081 from DOM which used backward propagation via the use-def links.
1083 It turns out that backward propagation is actually faster as
1084 there's less work to do for each NOT/NEG expression we find.
1085 Backwards propagation needs to look at the statement in a single
1086 backlink. Forward propagation needs to look at potentially more
1087 than one forward link. */
1090 simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
1092 gimple stmt = gsi_stmt (*gsi_p);
1093 tree rhs = gimple_assign_rhs1 (stmt);
1094 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
1096 /* See if the RHS_DEF_STMT has the same form as our statement. */
1097 if (is_gimple_assign (rhs_def_stmt)
1098 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
1100 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
1102 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1103 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1104 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
/* ~~y (or --y) cancels out: rewrite STMT as lhs = y.  */
1106 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1107 stmt = gsi_stmt (*gsi_p);
1113 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1114 the condition which we may be able to optimize better. */
/* NOTE(review): elided sample -- the storage-class/return-type line,
   braces and the tail of this function are not visible; comments below
   cover only the visible lines.  */
/* STMT is a GIMPLE_SWITCH.  If its index is a value-preserving integral
   conversion (NOP_EXPR) of another value, switch directly on the
   unconverted value so the inferred case constants can be propagated
   into the switch targets.  */
1117 simplify_gimple_switch (gimple stmt)
1119 tree cond = gimple_switch_index (stmt);
1123 /* The optimization that we really care about is removing unnecessary
1124 casts. That will let us do much better in propagating the inferred
1125 constant at the switch target. */
1126 if (TREE_CODE (cond) == SSA_NAME)
1128 def_stmt = SSA_NAME_DEF_STMT (cond);
1129 if (is_gimple_assign (def_stmt))
1131 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
1136 def = gimple_assign_rhs1 (def_stmt);
1138 #ifdef ENABLE_CHECKING
1139 /* ??? Why was Jeff testing this? We are gimple... */
1140 gcc_assert (is_gimple_val (def));
1143 to = TREE_TYPE (cond);
1144 ti = TREE_TYPE (def);
1146 /* If we have an extension that preserves value, then we
1147 can copy the source value into the switch. */
/* The conversion preserves every value when the destination type TO
   has at least the precision of the source type TI -- plus one extra
   bit when an unsigned source feeds a signed destination, so the sign
   bit cannot change a value's interpretation.  */
1149 need_precision = TYPE_PRECISION (ti);
1151 if (! INTEGRAL_TYPE_P (ti))
1153 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
1155 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1156 need_precision += 1;
1157 if (TYPE_PRECISION (to) < need_precision)
/* Value-preserving: replace the switch index with the uncast value.  */
1162 gimple_switch_set_index (stmt, def);
1170 /* Run bitwise and assignments through the folder. If the first argument is an
1171 ssa name that is itself a result of a typecast of an ADDR_EXPR to an
1172 integer, feed the ADDR_EXPR to the folder rather than the ssa name.
/* NOTE(review): elided sample -- return type, braces and any early
   returns of this function are not visible; only visible lines are
   documented.  */
/* Fold STMT, an assignment lhs = ARG1 & ARG2 where ARG2 must be an
   INTEGER_CST.  When ARG1 is an integral cast of an ADDR_EXPR, feed the
   ADDR_EXPR itself to fold_binary so the known alignment of the address
   can be exploited; if folding yields a gimple invariant, install it as
   the new RHS via GSI.  */
1176 simplify_bitwise_and (gimple_stmt_iterator *gsi, gimple stmt)
1179 tree arg1 = gimple_assign_rhs1 (stmt);
1180 tree arg2 = gimple_assign_rhs2 (stmt);
1182 if (TREE_CODE (arg2) != INTEGER_CST)
/* Default-def SSA names have no defining statement worth inspecting.  */
1185 if (TREE_CODE (arg1) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (arg1))
1187 gimple def = SSA_NAME_DEF_STMT (arg1);
1189 if (gimple_assign_cast_p (def)
1190 && INTEGRAL_TYPE_P (gimple_expr_type (def)))
1192 tree op = gimple_assign_rhs1 (def);
1194 if (TREE_CODE (op) == ADDR_EXPR)
1199 res = fold_binary (BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
1201 if (res && is_gimple_min_invariant (res))
1203 gimple_assign_set_rhs_from_tree (gsi, res);
1209 /* Main entry point for the forward propagation optimizer. */
/* NOTE(review): elided sample -- the FOR_EACH_BB loop header, braces,
   gsi_next calls and the final return are not visible; the dispatch
   structure below is therefore only partially shown.  */
/* Driver for the forwprop pass.  Walks every statement of every basic
   block and dispatches to the specific simplifiers above; returns TODO
   flags (presumably including TODO_cleanup_cfg when an EH/CFG edge was
   removed -- the cfg_changed handling lines are elided here).  */
1212 tree_ssa_forward_propagate_single_use_vars (void)
1215 unsigned int todoflags = 0;
1217 cfg_changed = false;
1221 gimple_stmt_iterator gsi;
1223 /* Note we update GSI within the loop as necessary. */
1224 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
1226 gimple stmt = gsi_stmt (gsi);
1228 /* If this statement sets an SSA_NAME to an address,
1229 try to propagate the address into the uses of the SSA_NAME. */
1230 if (is_gimple_assign (stmt))
1232 tree lhs = gimple_assign_lhs (stmt);
1233 tree rhs = gimple_assign_rhs1 (stmt);
1235 if (TREE_CODE (lhs) != SSA_NAME)
1241 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR
1242 /* Handle pointer conversions on invariant addresses
1243 as well, as this is valid gimple. */
1244 || (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1245 && TREE_CODE (rhs) == ADDR_EXPR
1246 && POINTER_TYPE_P (TREE_TYPE (lhs))))
/* If the address was propagated into every use, the definition is
   dead: release its defs and remove the statement.  */
1249 if (!stmt_references_abnormal_ssa_name (stmt)
1250 && forward_propagate_addr_expr (lhs, rhs))
1252 release_defs (stmt);
1253 todoflags |= TODO_remove_unused_locals;
1254 gsi_remove (&gsi, true);
1259 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1260 && is_gimple_min_invariant (rhs))
1262 /* Make sure to fold &a[0] + off_1 here. */
1263 fold_stmt_inplace (stmt);
1265 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1268 else if ((gimple_assign_rhs_code (stmt) == BIT_NOT_EXPR
1269 || gimple_assign_rhs_code (stmt) == NEGATE_EXPR)
1270 && TREE_CODE (rhs) == SSA_NAME)
1272 simplify_not_neg_expr (&gsi);
1275 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
1277 /* In this case the entire COND_EXPR is in rhs1. */
/* Defer overflow warnings so we only warn when the propagation
   actually happened (did_something) and the original RHS was not
   already marked TREE_NO_WARNING.  */
1279 fold_defer_overflow_warnings ();
1280 did_something = forward_propagate_into_cond (&gsi);
1281 stmt = gsi_stmt (gsi);
1282 if (did_something == 2)
1284 fold_undefer_overflow_warnings (!TREE_NO_WARNING (rhs)
1285 && did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
1288 else if (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1291 if (forward_propagate_comparison (stmt))
1293 release_defs (stmt);
1294 todoflags |= TODO_remove_unused_locals;
1295 gsi_remove (&gsi, true);
1300 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
1302 simplify_bitwise_and (&gsi, stmt);
1308 else if (gimple_code (stmt) == GIMPLE_SWITCH)
1310 simplify_gimple_switch (stmt);
1313 else if (gimple_code (stmt) == GIMPLE_COND)
1316 fold_defer_overflow_warnings ();
1317 did_something = forward_propagate_into_gimple_cond (stmt);
1318 if (did_something == 2)
1320 fold_undefer_overflow_warnings (did_something, stmt,
1321 WARN_STRICT_OVERFLOW_CONDITIONAL);
1330 todoflags |= TODO_cleanup_cfg;
/* NOTE(review): only the signature line of this gate function is
   visible in this elided sample; presumably it returns whether the
   forwprop flag is enabled -- TODO confirm against the full source.  */
1336 gate_forwprop (void)
1341 struct gimple_opt_pass pass_forwprop =
1345 "forwprop", /* name */
1346 gate_forwprop, /* gate */
1347 tree_ssa_forward_propagate_single_use_vars, /* execute */
1350 0, /* static_pass_number */
1351 TV_TREE_FORWPROP, /* tv_id */
1352 PROP_cfg | PROP_ssa, /* properties_required */
1353 0, /* properties_provided */
1354 0, /* properties_destroyed */
1355 0, /* todo_flags_start */
1359 | TODO_verify_ssa /* todo_flags_finish */