1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
26 #include "basic-block.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-dump.h"
32 #include "langhooks.h"
36 /* This pass propagates the RHS of assignment statements into use
37 sites of the LHS of the assignment. It's basically a specialized
38 form of tree combination. It is hoped all of this can disappear
39 when we have a generalized tree combiner.
41 One class of common cases we handle is forward propagating a single use
42 variable into a COND_EXPR.
46 if (x) goto ... else goto ...
48 Will be transformed into:
51 if (a COND b) goto ... else goto ...
53 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
55 Or (assuming c1 and c2 are constants):
59 if (x EQ/NEQ c2) goto ... else goto ...
61 Will be transformed into:
64 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
66 Similarly for x = a - c1.
72 if (x) goto ... else goto ...
74 Will be transformed into:
77 if (a == 0) goto ... else goto ...
79 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
80 For these cases, we propagate A into all, possibly more than one,
81 COND_EXPRs that use X.
87 if (x) goto ... else goto ...
89 Will be transformed into:
92 if (a != 0) goto ... else goto ...
94 (Assuming a is an integral type and x is a boolean or x is an
95 integral and a is a boolean.)
97 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
98 For these cases, we propagate A into all, possibly more than one,
99 COND_EXPRs that use X.
101 In addition to eliminating the variable and the statement which assigns
102 a value to the variable, we may be able to later thread the jump without
103 adding insane complexity in the dominator optimizer.
105 Also note these transformations can cascade. We handle this by having
106 a worklist of COND_EXPR statements to examine. As we make a change to
107 a statement, we put it back on the worklist to examine on the next
108 iteration of the main loop.
110 A second class of propagation opportunities arises for ADDR_EXPR
121 ptr = (type1*)&type2var;
124 Will get turned into (if type1 and type2 are the same size
125 and neither have volatile on them):
126 res = VIEW_CONVERT_EXPR<type1>(type2var)
131 ptr2 = ptr + <constant>;
135 ptr2 = &x[constant/elementsize];
140 offset = index * element_size;
141 offset_p = (pointer) offset;
142 ptr2 = ptr + offset_p
144 Will get turned into:
152 Provided that decl has known alignment >= 2, will get turned into
156 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
157 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
160 This will (of course) be extended as other needs arise. */
/* Forward declarations for helpers used before their definitions below.  */
162 static bool forward_propagate_addr_expr (tree name, tree rhs);
164 /* Set to true if we delete EH edges during the optimization. */
165 static bool cfg_changed;
167 static tree rhs_to_tree (tree type, gimple stmt);
169 /* Get the next statement we can propagate NAME's value into skipping
170 trivial copies. Returns the statement that is suitable as a
171 propagation destination or NULL_TREE if there is no such one.
172 This only returns destinations in a single-use chain. FINAL_NAME_P
173 if non-NULL is written to the ssa name that represents the use. */
/* NOTE(review): this is an excerpt -- the embedded original line numbers
   jump (176 -> 182, 183 -> 186, ...), so braces, returns and declarations
   are elided.  Comments describe only the visible code.  */
176 get_prop_dest_stmt (tree name, tree *final_name_p)
182 /* If name has multiple uses, bail out. */
183 if (!single_imm_use (name, &use, &use_stmt))
186 /* If this is not a trivial copy, we found it. */
187 if (!gimple_assign_ssa_name_copy_p (use_stmt)
/* A copy whose first RHS operand is not NAME also terminates the walk.  */
188 || gimple_assign_rhs1 (use_stmt) != name)
191 /* Continue searching uses of the copy destination. */
192 name = gimple_assign_lhs (use_stmt);
/* Report the last SSA name in the copy chain to the caller, if asked.  */
196 *final_name_p = name;
201 /* Get the statement we can propagate from into NAME skipping
202 trivial copies. Returns the statement which defines the
203 propagation source or NULL_TREE if there is no such one.
204 If SINGLE_USE_ONLY is set considers only sources which have
205 a single use chain up to NAME. If SINGLE_USE_P is non-null,
206 it is set to whether the chain to NAME is a single use chain
207 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
/* NOTE(review): excerpt -- interior lines elided where the embedded
   original numbering jumps.  */
210 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
212 bool single_use = true;
215 gimple def_stmt = SSA_NAME_DEF_STMT (name);
/* Track whether the whole def chain stays single-use; the handling of the
   multi-use case (original lines 218-223) is elided here.  */
217 if (!has_single_use (name))
224 /* If name is defined by a PHI node or is the default def, bail out. */
225 if (!is_gimple_assign (def_stmt))
228 /* If def_stmt is not a simple copy, we possibly found it. */
229 if (!gimple_assign_ssa_name_copy_p (def_stmt))
233 if (!single_use_only && single_use_p)
234 *single_use_p = single_use;
236 /* We can look through pointer conversions in the search
237 for a useful stmt for the comparison folding. */
238 rhs = gimple_assign_rhs1 (def_stmt);
239 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
240 && TREE_CODE (rhs) == SSA_NAME
241 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
242 && POINTER_TYPE_P (TREE_TYPE (rhs)))
249 /* Continue searching the def of the copy source name. */
250 name = gimple_assign_rhs1 (def_stmt);
255 /* Checks if the destination ssa name in DEF_STMT can be used as
256 propagation source. Returns true if so, otherwise false. */
/* NOTE(review): excerpt -- the early-return statements after each guard
   (original lines 268, 273, 278, ...) are elided.  */
259 can_propagate_from (gimple def_stmt)
264 gcc_assert (is_gimple_assign (def_stmt));
266 /* If the rhs has side-effects we cannot propagate from it. */
267 if (gimple_has_volatile_ops (def_stmt))
270 /* If the rhs is a load we cannot propagate from it. */
271 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
272 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
275 /* Constants can be always propagated. */
276 if (gimple_assign_single_p (def_stmt)
277 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
280 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
281 FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_USE)
282 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
285 /* If the definition is a conversion of a pointer to a function type,
286 then we can not apply optimizations as some targets require
287 function pointers to be canonicalized and in this case this
288 optimization could eliminate a necessary canonicalization. */
289 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
291 tree rhs = gimple_assign_rhs1 (def_stmt);
/* TREE_TYPE (TREE_TYPE (rhs)) is the pointed-to type of the pointer RHS.  */
292 if (POINTER_TYPE_P (TREE_TYPE (rhs))
293 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
300 /* Remove a copy chain ending in NAME along the defs but not
301 further or including UP_TO_STMT. If NAME was replaced in
302 its only use then this function can be used to clean up
303 dead stmts. Returns true if UP_TO_STMT can be removed
304 as well, otherwise false. */
/* NOTE(review): excerpt -- the enclosing do { } of the loop and the
   release of the removed defs (original lines 310-315, 321, 323) are
   elided here.  */
307 remove_prop_source_from_use (tree name, gimple up_to_stmt)
309 gimple_stmt_iterator gsi;
/* Only delete a def whose result is now completely unused.  */
313 if (!has_zero_uses (name))
316 stmt = SSA_NAME_DEF_STMT (name);
317 if (stmt == up_to_stmt)
320 gsi = gsi_for_stmt (stmt);
322 gsi_remove (&gsi, true);
/* Walk further up the chain only while the removed stmt was itself a
   plain copy of another SSA name.  */
324 name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL;
325 } while (name && TREE_CODE (name) == SSA_NAME);
330 /* Return the rhs of a gimple_assign STMT in a form of a single tree,
331 converted to type TYPE.
333 This should disappear, but is needed so we can combine expressions and use
334 the fold() interfaces. Long term, we need to develop folding and combine
335 routines that deal with gimple exclusively. */
338 rhs_to_tree (tree type, gimple stmt)
340 location_t loc = gimple_location (stmt);
341 enum tree_code code = gimple_assign_rhs_code (stmt);
/* Binary RHSes are rebuilt with folding ...  */
342 if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
343 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
344 gimple_assign_rhs2 (stmt));
/* ... while unary RHSes use plain build1 -- NOTE(review): asymmetry with
   the binary case above; confirm against upstream before changing.  */
345 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
346 return build1 (code, type, gimple_assign_rhs1 (stmt));
347 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
348 return gimple_assign_rhs1 (stmt);
353 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
354 the folded result in a form suitable for COND_EXPR_COND or
355 NULL_TREE, if there is no suitable simplified form. If
356 INVARIANT_ONLY is true only gimple_min_invariant results are
357 considered simplified. */
/* NOTE(review): excerpt -- the NULL check after fold_binary_loc and the
   final returns (original lines 368-370, 379-382) are elided.  */
360 combine_cond_expr_cond (location_t loc, enum tree_code code, tree type,
361 tree op0, tree op1, bool invariant_only)
/* Only comparison codes are legal here.  */
365 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
367 t = fold_binary_loc (loc, code, type, op0, op1);
371 /* Require that we got a boolean type out if we put one in. */
372 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
374 /* Canonicalize the combined condition for use in a COND_EXPR. */
375 t = canonicalize_cond_expr_cond (t);
377 /* Bail out if we required an invariant but didn't get one. */
378 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
384 /* Propagate from the ssa name definition statements of COND_EXPR
385 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
386 Returns zero if no statement was changed, one if there were
387 changes and two if cfg_cleanup needs to run.
389 This must be kept in sync with forward_propagate_into_cond. */
/* NOTE(review): excerpt -- the embedded original numbering jumps, so
   loop/brace structure and several statements are elided; comments below
   describe only the visible code.  */
392 forward_propagate_into_gimple_cond (gimple stmt)
394 int did_something = 0;
395 location_t loc = gimple_location (stmt);
398 tree tmp = NULL_TREE;
399 tree name = NULL_TREE, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
401 bool single_use0_p = false, single_use1_p = false;
402 enum tree_code code = gimple_cond_code (stmt);
404 /* We can do tree combining on SSA_NAME and comparison expressions. */
405 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison)
407 /* For comparisons use the first operand, that is likely to
408 simplify comparisons against constants. */
409 if (TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
411 name = gimple_cond_lhs (stmt);
412 def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
413 if (def_stmt && can_propagate_from (def_stmt))
415 tree op1 = gimple_cond_rhs (stmt);
/* Expand the defining stmt's RHS to a tree so fold can combine it with
   the condition.  !single_use0_p requires an invariant result so we do
   not duplicate non-trivial computations.  */
416 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
417 tmp = combine_cond_expr_cond (loc, code, boolean_type_node,
418 rhs0, op1, !single_use0_p);
421 /* If that wasn't successful, try the second operand. */
423 && TREE_CODE (gimple_cond_rhs (stmt)) == SSA_NAME)
425 tree op0 = gimple_cond_lhs (stmt);
426 name = gimple_cond_rhs (stmt);
427 def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
428 if (!def_stmt || !can_propagate_from (def_stmt))
429 return did_something;
431 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
432 tmp = combine_cond_expr_cond (loc, code, boolean_type_node, op0,
433 rhs1, !single_use1_p);
435 /* If that wasn't successful either, try both operands. */
438 && rhs1 != NULL_TREE)
439 tmp = combine_cond_expr_cond (loc, code, boolean_type_node, rhs0,
440 fold_convert_loc (loc,
443 !(single_use0_p && single_use1_p));
/* Dump the replacement before rewriting the condition.  */
448 if (dump_file && tmp)
450 tree cond = build2 (gimple_cond_code (stmt),
452 gimple_cond_lhs (stmt),
453 gimple_cond_rhs (stmt));
454 fprintf (dump_file, " Replaced '");
455 print_generic_expr (dump_file, cond, 0);
456 fprintf (dump_file, "' with '");
457 print_generic_expr (dump_file, tmp, 0);
458 fprintf (dump_file, "'\n");
461 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
464 /* Remove defining statements. */
465 remove_prop_source_from_use (name, NULL);
/* Per the header comment: an invariant result means cfg_cleanup should
   run (return value 2); otherwise report a plain change (1).  */
467 if (is_gimple_min_invariant (tmp))
469 else if (did_something == 0)
472 /* Continue combining. */
479 return did_something;
483 /* Propagate from the ssa name definition statements of COND_EXPR
484 in the rhs of statement STMT into the conditional if that simplifies it.
485 Returns zero if no statement was changed, one if there were
486 changes and two if cfg_cleanup needs to run.
488 This must be kept in sync with forward_propagate_into_gimple_cond. */
/* NOTE(review): excerpt -- interior lines elided where the embedded
   original numbering jumps.  */
491 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
493 gimple stmt = gsi_stmt (*gsi_p);
494 location_t loc = gimple_location (stmt);
495 int did_something = 0;
498 tree tmp = NULL_TREE;
499 tree cond = gimple_assign_rhs1 (stmt);
500 tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
502 bool single_use0_p = false, single_use1_p = false;
504 /* We can do tree combining on SSA_NAME and comparison expressions. */
505 if (COMPARISON_CLASS_P (cond)
506 && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME)
508 /* For comparisons use the first operand, that is likely to
509 simplify comparisons against constants. */
510 name = TREE_OPERAND (cond, 0);
511 def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
512 if (def_stmt && can_propagate_from (def_stmt))
514 tree op1 = TREE_OPERAND (cond, 1);
515 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
516 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
518 rhs0, op1, !single_use0_p);
520 /* If that wasn't successful, try the second operand. */
522 && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME)
524 tree op0 = TREE_OPERAND (cond, 0);
525 name = TREE_OPERAND (cond, 1);
526 def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
527 if (!def_stmt || !can_propagate_from (def_stmt))
528 return did_something;
530 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
531 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
533 op0, rhs1, !single_use1_p);
535 /* If that wasn't successful either, try both operands. */
538 && rhs1 != NULL_TREE)
539 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
542 fold_convert_loc (loc,
545 !(single_use0_p && single_use1_p));
547 else if (TREE_CODE (cond) == SSA_NAME)
550 def_stmt = get_prop_source_stmt (name, true, NULL);
/* FIX: this guard previously read "if (def_stmt || ...)", which bailed
   out exactly when a propagation source WAS found, and when none was
   found fell through to can_propagate_from (NULL) -- whose
   gcc_assert (is_gimple_assign (def_stmt)) would trip.  Invert the first
   test to match the sibling guard in forward_propagate_into_gimple_cond
   (see the "must be kept in sync" note above).  */
551 if (!def_stmt || !can_propagate_from (def_stmt))
552 return did_something;
554 rhs0 = gimple_assign_rhs1 (def_stmt);
/* Fold the COND_EXPR condition "name != 0" against the def's RHS.  */
555 tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
556 build_int_cst (TREE_TYPE (rhs0), 0),
562 if (dump_file && tmp)
564 fprintf (dump_file, " Replaced '");
565 print_generic_expr (dump_file, cond, 0);
566 fprintf (dump_file, "' with '");
567 print_generic_expr (dump_file, tmp, 0);
568 fprintf (dump_file, "'\n");
571 gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
572 stmt = gsi_stmt (*gsi_p);
575 /* Remove defining statements. */
576 remove_prop_source_from_use (name, NULL);
/* Invariant result -> cfg_cleanup may be needed (2); otherwise 1.  */
578 if (is_gimple_min_invariant (tmp))
580 else if (did_something == 0)
583 /* Continue combining. */
590 return did_something;
593 /* We've just substituted an ADDR_EXPR into stmt. Update all the
594 relevant data structures to match. */
597 tidy_after_forward_propagate_addr (gimple stmt)
599 /* We may have turned a trapping insn into a non-trapping insn. */
600 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
/* If dead EH edges were purged the CFG changed -- presumably the elided
   body (original lines 602-603) records that in cfg_changed; confirm.  */
601 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
/* Re-derive TREE_INVARIANT/TREE_CONSTANT on the substituted ADDR_EXPR.  */
604 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
605 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
608 /* DEF_RHS contains the address of the 0th element in an array.
609 USE_STMT uses type of DEF_RHS to compute the address of an
610 arbitrary element within the array. The (variable) byte offset
611 of the element is contained in OFFSET.
613 We walk back through the use-def chains of OFFSET to verify that
614 it is indeed computing the offset of an element within the array
615 and extract the index corresponding to the given byte offset.
617 We then try to fold the entire address expression into a form
620 If we are successful, we replace the right hand side of USE_STMT
621 with the new address computation. */
/* NOTE(review): excerpt -- the embedded original numbering jumps, so
   braces, early returns and some statements are elided.  */
624 forward_propagate_addr_into_variable_array_index (tree offset,
626 gimple_stmt_iterator *use_stmt_gsi)
629 gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
/* TUNIT is the element size in bytes; it must fit an unsigned HWI.  */
632 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
633 if (!host_integerp (tunit, 1))
636 /* Get the offset's defining statement. */
637 offset_def = SSA_NAME_DEF_STMT (offset);
639 /* Try to find an expression for a proper index. This is either a
640 multiplication expression by the element size or just the ssa name we came
641 along in case the element size is one. In that case, however, we do not
642 allow multiplications because they can be computing index to a higher
643 level dimension (PR 37861). */
644 if (integer_onep (tunit))
646 if (is_gimple_assign (offset_def)
647 && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
654 /* The statement which defines OFFSET before type conversion
655 must be a simple GIMPLE_ASSIGN. */
656 if (!is_gimple_assign (offset_def))
659 /* The RHS of the statement which defines OFFSET must be a
660 multiplication of an object by the size of the array elements.
661 This implicitly verifies that the size of the array elements
663 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
664 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
665 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
667 /* The first operand to the MULT_EXPR is the desired index. */
668 index = gimple_assign_rhs1 (offset_def);
670 /* If we have idx * tunit + CST * tunit re-associate that. */
671 else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
672 || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
673 && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
674 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
/* TMP becomes CST / tunit, valid only when the division is exact.  */
675 && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
676 gimple_assign_rhs2 (offset_def),
677 tunit)) != NULL_TREE)
679 gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
680 if (is_gimple_assign (offset_def2)
681 && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
682 && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
683 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
/* Rebuild idx +/- CST/tunit as the array index.  */
685 index = fold_build2 (gimple_assign_rhs_code (offset_def),
687 gimple_assign_rhs1 (offset_def2), tmp);
696 /* Replace the pointer addition with array indexing. */
697 index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
698 true, GSI_SAME_STMT);
699 gimple_assign_set_rhs_from_tree (use_stmt_gsi, unshare_expr (def_rhs));
700 use_stmt = gsi_stmt (*use_stmt_gsi);
/* Store the new index into operand 1 of the ARRAY_REF inside the
   ADDR_EXPR we just installed as the RHS.  */
701 TREE_OPERAND (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0), 1)
704 /* That should have created gimple, so there is no need to
705 record information to undo the propagation. */
706 fold_stmt_inplace (use_stmt);
707 tidy_after_forward_propagate_addr (use_stmt);
711 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
712 ADDR_EXPR <whatever>.
714 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
715 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
716 node or for recovery of array indexing from pointer arithmetic.
718 Return true if the propagation was successful (the propagation can
719 be not totally successful, yet things may have been changed). */
/* NOTE(review): excerpt -- the embedded original numbering jumps
   throughout, so braces, returns and several statements are elided;
   comments below describe only the visible code.  */
722 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
723 gimple_stmt_iterator *use_stmt_gsi,
726 tree lhs, rhs, rhs2, array_ref;
728 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
729 enum tree_code rhs_code;
733 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
735 lhs = gimple_assign_lhs (use_stmt);
736 rhs_code = gimple_assign_rhs_code (use_stmt);
737 rhs = gimple_assign_rhs1 (use_stmt);
739 /* Trivial cases. The use statement could be a trivial copy or a
740 useless conversion. Recurse to the uses of the lhs as copyprop does
741 not copy through different variant pointers and FRE does not catch
742 all useless conversions. Treat the case of a single-use name and
743 a conversion to def_rhs type separate, though. */
744 if (TREE_CODE (lhs) == SSA_NAME
745 && ((rhs_code == SSA_NAME && rhs == name)
746 || CONVERT_EXPR_CODE_P (rhs_code)))
748 /* Only recurse if we don't deal with a single use or we cannot
749 do the propagation to the current statement. In particular
750 we can end up with a conversion needed for a non-invariant
751 address which we cannot do in a single statement. */
753 || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
754 && (!is_gimple_min_invariant (def_rhs)
755 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
756 && POINTER_TYPE_P (TREE_TYPE (def_rhs))
757 && (TYPE_PRECISION (TREE_TYPE (lhs))
758 > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
759 return forward_propagate_addr_expr (lhs, def_rhs);
/* Otherwise propagate in place: install DEF_RHS as the new RHS and fix
   the rhs code depending on whether a conversion is still needed.  */
761 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
762 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
763 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
765 gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
769 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
770 ADDR_EXPR will not appear on the LHS. */
771 lhsp = gimple_assign_lhs_ptr (use_stmt);
772 while (handled_component_p (*lhsp))
773 lhsp = &TREE_OPERAND (*lhsp, 0);
776 /* Now see if the LHS node is an INDIRECT_REF using NAME. If so,
777 propagate the ADDR_EXPR into the use of NAME and fold the result. */
778 if (TREE_CODE (lhs) == INDIRECT_REF
779 && TREE_OPERAND (lhs, 0) == name)
781 if (may_propagate_address_into_dereference (def_rhs, lhs)
782 && (lhsp != gimple_assign_lhs_ptr (use_stmt)
783 || useless_type_conversion_p
784 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)), TREE_TYPE (rhs))))
786 *lhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
787 fold_stmt_inplace (use_stmt);
788 tidy_after_forward_propagate_addr (use_stmt);
790 /* Continue propagating into the RHS if this was not the only use. */
795 /* We can have a struct assignment dereferencing our name twice.
796 Note that we didn't propagate into the lhs to not falsely
797 claim we did when propagating into the rhs. */
801 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
802 nodes from the RHS. */
803 rhsp = gimple_assign_rhs1_ptr (use_stmt);
804 if (TREE_CODE (*rhsp) == ADDR_EXPR)
806 rhsp = &TREE_OPERAND (*rhsp, 0);
809 while (handled_component_p (*rhsp))
810 rhsp = &TREE_OPERAND (*rhsp, 0);
813 /* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
814 propagate the ADDR_EXPR into the use of NAME and fold the result. */
815 if (TREE_CODE (rhs) == INDIRECT_REF
816 && TREE_OPERAND (rhs, 0) == name
817 && may_propagate_address_into_dereference (def_rhs, rhs))
819 *rhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
820 fold_stmt_inplace (use_stmt);
821 tidy_after_forward_propagate_addr (use_stmt);
825 /* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
826 propagate the ADDR_EXPR into the use of NAME and try to
827 create a VCE and fold the result. */
828 if (TREE_CODE (rhs) == INDIRECT_REF
829 && TREE_OPERAND (rhs, 0) == name
830 && TYPE_SIZE (TREE_TYPE (rhs))
831 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
832 /* Function decls should not be used for VCE either as it could be a
833 function descriptor that we want and not the actual function code. */
834 && TREE_CODE (TREE_OPERAND (def_rhs, 0)) != FUNCTION_DECL
835 /* We should not convert volatile loads to non volatile loads. */
836 && !TYPE_VOLATILE (TREE_TYPE (rhs))
837 && !TYPE_VOLATILE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
838 && operand_equal_p (TYPE_SIZE (TREE_TYPE (rhs)),
839 TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))), 0)
840 /* Make sure we only do TBAA compatible replacements. */
841 && get_alias_set (TREE_OPERAND (def_rhs, 0)) == get_alias_set (rhs))
843 tree def_rhs_base, new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
844 new_rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), new_rhs);
845 if (TREE_CODE (new_rhs) != VIEW_CONVERT_EXPR)
847 /* If we have folded the VIEW_CONVERT_EXPR then the result is only
848 valid if we can replace the whole rhs of the use statement. */
849 if (rhs != gimple_assign_rhs1 (use_stmt))
851 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true, NULL,
853 gimple_assign_set_rhs1 (use_stmt, new_rhs);
854 tidy_after_forward_propagate_addr (use_stmt);
857 /* If the defining rhs comes from an indirect reference, then do not
858 convert into a VIEW_CONVERT_EXPR. Likewise if we'll end up taking
859 the address of a V_C_E of a constant. */
860 def_rhs_base = TREE_OPERAND (def_rhs, 0);
861 while (handled_component_p (def_rhs_base))
862 def_rhs_base = TREE_OPERAND (def_rhs_base, 0);
863 if (!INDIRECT_REF_P (def_rhs_base)
865 || !is_gimple_min_invariant (def_rhs)))
867 /* We may have arbitrary VIEW_CONVERT_EXPRs in a nested component
868 reference. Place it there and fold the thing. */
870 fold_stmt_inplace (use_stmt);
871 tidy_after_forward_propagate_addr (use_stmt);
876 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
878 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
879 || gimple_assign_rhs1 (use_stmt) != name)
882 /* The remaining cases are all for turning pointer arithmetic into
883 array indexing. They only apply when we have the address of
884 element zero in an array. If that is not the case then there
886 array_ref = TREE_OPERAND (def_rhs, 0);
887 if (TREE_CODE (array_ref) != ARRAY_REF
888 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
889 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
892 rhs2 = gimple_assign_rhs2 (use_stmt);
893 /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
894 of the elements in X into &x[C1 + C2/element size]. */
895 if (TREE_CODE (rhs2) == INTEGER_CST)
897 tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
902 tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
903 new_rhs = unshare_expr (new_rhs);
904 if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
906 if (!is_gimple_min_invariant (new_rhs))
907 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
909 true, GSI_SAME_STMT);
910 new_rhs = fold_convert (type, new_rhs);
912 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
913 use_stmt = gsi_stmt (*use_stmt_gsi);
914 update_stmt (use_stmt);
915 tidy_after_forward_propagate_addr (use_stmt);
920 /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
921 converting a multiplication of an index by the size of the
922 array elements, then the result is converted into the proper
923 type for the arithmetic. */
924 if (TREE_CODE (rhs2) == SSA_NAME
925 && integer_zerop (TREE_OPERAND (array_ref, 1))
926 && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
927 /* Avoid problems with IVopts creating PLUS_EXPRs with a
928 different type than their operands. */
929 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
930 return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
935 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
937 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
938 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
939 node or for recovery of array indexing from pointer arithmetic.
940 Returns true, if all uses have been propagated into. */
/* NOTE(review): excerpt -- braces, `continue's and the final return
   (where the original numbering jumps) are elided.  */
943 forward_propagate_addr_expr (tree name, tree rhs)
945 int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
946 imm_use_iterator iter;
949 bool single_use_p = has_single_use (name);
951 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
956 /* If the use is not in a simple assignment statement, then
957 there is nothing we can do. */
958 if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
960 if (!is_gimple_debug (use_stmt))
965 /* If the use is in a deeper loop nest, then we do not want
966 to propagate non-invariant ADDR_EXPRs into the loop as that
967 is likely adding expression evaluations into the loop. */
968 if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
969 && !is_gimple_min_invariant (rhs))
976 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
977 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
979 /* If the use has moved to a different statement adjust
980 the update machinery for the old statement too. */
981 if (use_stmt != gsi_stmt (gsi))
983 update_stmt (use_stmt);
984 use_stmt = gsi_stmt (gsi);
987 update_stmt (use_stmt);
991 /* Remove intermediate now unused copy and conversion chains. */
992 use_rhs = gimple_assign_rhs1 (use_stmt);
994 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
995 && TREE_CODE (use_rhs) == SSA_NAME
996 && has_zero_uses (gimple_assign_lhs (use_stmt)))
998 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
999 release_defs (use_stmt);
1000 gsi_remove (&gsi, true);
1007 /* Forward propagate the comparison defined in STMT like
1008 cond_1 = x CMP y to uses of the form
1012 Returns true if stmt is now unused. */
/* NOTE(review): excerpt -- interior lines (braces, returns, some
   operands) are elided where the embedded original numbering jumps.  */
1015 forward_propagate_comparison (gimple stmt)
1017 tree name = gimple_assign_lhs (stmt);
1019 tree tmp = NULL_TREE;
1021 /* Don't propagate ssa names that occur in abnormal phis. */
1022 if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1023 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
1024 || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
1025 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
1028 /* Do not un-cse comparisons. But propagate through copies. */
/* NAME is overwritten with the final name of the single-use chain.  */
1029 use_stmt = get_prop_dest_stmt (name, &name);
1033 /* Conversion of the condition result to another integral type. */
1034 if (is_gimple_assign (use_stmt)
1035 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt))
1036 || TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
1038 || gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
1039 && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (use_stmt))))
1041 tree lhs = gimple_assign_lhs (use_stmt);
1043 /* We can propagate the condition into a conversion. */
1044 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt)))
1046 /* Avoid using fold here as that may create a COND_EXPR with
1047 non-boolean condition as canonical form. */
1048 tmp = build2 (gimple_assign_rhs_code (stmt), TREE_TYPE (lhs),
1049 gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
1051 /* We can propagate the condition into X op CST where op
1052 is EQ_EXPR or NE_EXPR and CST is either one or zero. */
1053 else if (TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
1055 && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
1056 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
1058 enum tree_code code = gimple_assign_rhs_code (use_stmt);
1059 tree cst = gimple_assign_rhs2 (use_stmt);
/* Rebuild the original comparison so it can be combined with CODE/CST.  */
1062 cond = build2 (gimple_assign_rhs_code (stmt),
1064 gimple_assign_rhs1 (stmt),
1065 gimple_assign_rhs2 (stmt));
1067 tmp = combine_cond_expr_cond (gimple_location (use_stmt),
1068 code, TREE_TYPE (lhs),
1070 if (tmp == NULL_TREE)
1073 /* We can propagate the condition into a statement that
1074 computes the logical negation of the comparison result. */
1075 else if (gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
1077 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
/* Float comparisons are not invertible when NaNs are honored.  */
1078 bool nans = HONOR_NANS (TYPE_MODE (type));
1079 enum tree_code code;
1080 code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
1081 if (code == ERROR_MARK)
1084 tmp = build2 (code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
1085 gimple_assign_rhs2 (stmt));
1091 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1092 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
1093 use_stmt = gsi_stmt (gsi);
1094 update_stmt (use_stmt);
1097 /* Remove defining statements. */
1098 remove_prop_source_from_use (name, stmt);
1100 if (dump_file && (dump_flags & TDF_DETAILS))
1102 tree old_rhs = rhs_to_tree (TREE_TYPE (gimple_assign_lhs (stmt)),
1104 fprintf (dump_file, " Replaced '");
1105 print_generic_expr (dump_file, old_rhs, dump_flags);
1106 fprintf (dump_file, "' with '");
1107 print_generic_expr (dump_file, tmp, dump_flags);
1108 fprintf (dump_file, "'\n");
1117 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1118 If so, we can change STMT into lhs = y which can later be copy
1119 propagated. Similarly for negation.
1121 This could trivially be formulated as a forward propagation
1122 to immediate uses. However, we already had an implementation
1123 from DOM which used backward propagation via the use-def links.
1125 It turns out that backward propagation is actually faster as
1126 there's less work to do for each NOT/NEG expression we find.
1127 Backwards propagation needs to look at the statement in a single
1128 backlink. Forward propagation needs to look at potentially more
1129 than one forward link. */
/* GSI_P points at an assignment whose RHS code is BIT_NOT_EXPR or
   NEGATE_EXPR and whose operand is an SSA_NAME; the main pass loop
   checks both conditions before dispatching here.  */
1132 simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
1134 gimple stmt = gsi_stmt (*gsi_p);
1135 tree rhs = gimple_assign_rhs1 (stmt);
/* Walk one step back along the use-def chain to the statement that
   defines the NOT/NEG operand.  */
1136 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
1138 /* See if the RHS_DEF_STMT has the same form as our statement. */
1139 if (is_gimple_assign (rhs_def_stmt)
1140 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
1142 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
1144 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
/* Names occurring in abnormal PHIs must never be propagated.  */
1145 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1146 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
/* ~~y (respectively --y) simplifies to plain y: rewrite the RHS.  */
1148 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
/* Re-fetch the statement; replacing the RHS may have rebuilt it.  */
1149 stmt = gsi_stmt (*gsi_p);
1155 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1156 the condition which we may be able to optimize better. */
1159 simplify_gimple_switch (gimple stmt)
1161 tree cond = gimple_switch_index (stmt)
1165 /* The optimization that we really care about is removing unnecessary
1166 casts. That will let us do much better in propagating the inferred
1167 constant at the switch target. */
1168 if (TREE_CODE (cond) == SSA_NAME)
1170 def_stmt = SSA_NAME_DEF_STMT (cond);
1171 if (is_gimple_assign (def_stmt))
/* Only look through simple (NOP) conversions of the switch index.  */
1173 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
1178 def = gimple_assign_rhs1 (def_stmt);
1180 #ifdef ENABLE_CHECKING
1181 /* ??? Why was Jeff testing this? We are gimple... */
1182 gcc_assert (is_gimple_val (def));
/* TO is the type the switch compares in; TI is the type of the
   value before the conversion.  */
1185 to = TREE_TYPE (cond);
1186 ti = TREE_TYPE (def);
1188 /* If we have an extension that preserves value, then we
1189 can copy the source value into the switch. */
/* Required precision starts at the source's; converting an unsigned
   source to a signed destination needs one extra bit to preserve
   all values.  */
1191 need_precision = TYPE_PRECISION (ti);
1193 if (! INTEGRAL_TYPE_P (ti))
1195 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
1197 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1198 need_precision += 1;
/* Strip the cast only when the destination can represent every
   source value.  */
1199 if (TYPE_PRECISION (to) < need_precision)
/* Use the uncast value directly as the switch index.  */
1204 gimple_switch_set_index (stmt, def);
1212 /* Run bitwise and assignments through the folder. If the first argument is an
1213 ssa name that is itself a result of a typecast of an ADDR_EXPR to an
1214 integer, feed the ADDR_EXPR to the folder rather than the ssa name.
1218 simplify_bitwise_and (gimple_stmt_iterator *gsi, gimple stmt)
1221 tree arg1 = gimple_assign_rhs1 (stmt);
1222 tree arg2 = gimple_assign_rhs2 (stmt);
/* Only AND with a constant mask is interesting here.  */
1224 if (TREE_CODE (arg2) != INTEGER_CST)
/* A default definition has no real defining statement to look
   through.  */
1227 if (TREE_CODE (arg1) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (arg1))
1229 gimple def = SSA_NAME_DEF_STMT (arg1);
/* Look through an integral conversion ...  */
1231 if (gimple_assign_cast_p (def)
1232 && INTEGRAL_TYPE_P (gimple_expr_type (def)))
1234 tree op = gimple_assign_rhs1 (def);
/* ... of an address, so fold sees the ADDR_EXPR itself
   (presumably letting it exploit the address's known alignment
   to evaluate the mask — NOTE(review): confirm).  */
1236 if (TREE_CODE (op) == ADDR_EXPR)
1241 res = fold_binary_loc (gimple_location (stmt),
1242 BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
/* Commit the folded form only when it is a gimple invariant.  */
1244 if (res && is_gimple_min_invariant (res))
1246 gimple_assign_set_rhs_from_tree (gsi, res);
1252 /* Main entry point for the forward propagation optimizer. */
/* Walks every statement of every basic block and dispatches to the
   specific simplifiers above based on the statement's shape.  Returns
   the accumulated TODO flags for the pass manager.  */
1255 tree_ssa_forward_propagate_single_use_vars (void)
1258 unsigned int todoflags = 0;
1260 cfg_changed = false;
1264 gimple_stmt_iterator gsi;
1266 /* Note we update GSI within the loop as necessary. */
1267 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
1269 gimple stmt = gsi_stmt (gsi);
1271 /* If this statement sets an SSA_NAME to an address,
1272 try to propagate the address into the uses of the SSA_NAME. */
1273 if (is_gimple_assign (stmt))
1275 tree lhs = gimple_assign_lhs (stmt);
1276 tree rhs = gimple_assign_rhs1 (stmt);
/* Only SSA_NAME definitions are propagation candidates.  */
1278 if (TREE_CODE (lhs) != SSA_NAME)
1284 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR
1285 /* Handle pointer conversions on invariant addresses
1286 as well, as this is valid gimple. */
1287 || (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1288 && TREE_CODE (rhs) == ADDR_EXPR
1289 && POINTER_TYPE_P (TREE_TYPE (lhs))))
/* Never propagate statements involving abnormal SSA names.  */
1292 if (!stmt_references_abnormal_ssa_name (stmt)
1293 && forward_propagate_addr_expr (lhs, rhs))
/* All uses were replaced: the definition is dead, remove it.  */
1295 release_defs (stmt);
1296 todoflags |= TODO_remove_unused_locals;
1297 gsi_remove (&gsi, true);
1302 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1303 && is_gimple_min_invariant (rhs))
1305 /* Make sure to fold &a[0] + off_1 here. */
1306 fold_stmt_inplace (stmt);
1308 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
/* ~x or -x with an SSA operand: try to cancel a double NOT/NEG
   via the defining statement.  */
1311 else if ((gimple_assign_rhs_code (stmt) == BIT_NOT_EXPR
1312 || gimple_assign_rhs_code (stmt) == NEGATE_EXPR)
1313 && TREE_CODE (rhs) == SSA_NAME)
1315 simplify_not_neg_expr (&gsi);
1318 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
1320 /* In this case the entire COND_EXPR is in rhs1. */
/* Defer overflow warnings so we only warn when the transformed
   condition is actually kept.  */
1322 fold_defer_overflow_warnings ();
1323 did_something = forward_propagate_into_cond (&gsi);
1324 stmt = gsi_stmt (gsi);
/* NOTE(review): a return of 2 appears to signal a CFG-changing
   simplification — confirm against forward_propagate_into_cond.  */
1325 if (did_something == 2)
1327 fold_undefer_overflow_warnings (!TREE_NO_WARNING (rhs)
1328 && did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
1331 else if (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1334 if (forward_propagate_comparison (stmt))
/* Comparison was propagated into all uses; drop the definition.  */
1336 release_defs (stmt);
1337 todoflags |= TODO_remove_unused_locals;
1338 gsi_remove (&gsi, true);
1343 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
1345 simplify_bitwise_and (&gsi, stmt);
1351 else if (gimple_code (stmt) == GIMPLE_SWITCH)
1353 simplify_gimple_switch (stmt);
1356 else if (gimple_code (stmt) == GIMPLE_COND)
1359 fold_defer_overflow_warnings ();
1360 did_something = forward_propagate_into_gimple_cond (stmt);
1361 if (did_something == 2)
1363 fold_undefer_overflow_warnings (did_something, stmt,
1364 WARN_STRICT_OVERFLOW_CONDITIONAL);
/* Propagation may have created unreachable edges; ask the pass
   manager for a CFG cleanup.  */
1373 todoflags |= TODO_cleanup_cfg;
/* Pass gate: run forwprop only when enabled (flag_tree_forwprop,
   i.e. -ftree-forwprop).  */
1379 gate_forwprop (void)
1381 return flag_tree_forwprop;
/* Pass descriptor registering forwprop with the pass manager.
   Requires CFG and SSA form; verifies SSA on completion.  */
1384 struct gimple_opt_pass pass_forwprop =
1388 "forwprop", /* name */
1389 gate_forwprop, /* gate */
1390 tree_ssa_forward_propagate_single_use_vars, /* execute */
1393 0, /* static_pass_number */
1394 TV_TREE_FORWPROP, /* tv_id */
1395 PROP_cfg | PROP_ssa, /* properties_required */
1396 0, /* properties_provided */
1397 0, /* properties_destroyed */
1398 0, /* todo_flags_start */
1402 | TODO_verify_ssa /* todo_flags_finish */