1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004, 2005 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
23 #include "coretypes.h"
29 #include "basic-block.h"
31 #include "diagnostic.h"
32 #include "tree-flow.h"
33 #include "tree-pass.h"
34 #include "tree-dump.h"
35 #include "langhooks.h"
37 /* This pass propagates the RHS of assignment statements into use
38 sites of the LHS of the assignment. It's basically a specialized
39 form of tree combination. It is hoped all of this can disappear
40 when we have a generalized tree combiner.
42 Note carefully that after propagation the resulting statement
43 must still be a proper gimple statement. Right now we simply
44 only perform propagations we know will result in valid gimple
45 code. One day we'll want to generalize this code.
47 One class of common cases we handle is forward propagating a single use
48 variable into a COND_EXPR.
52 if (x) goto ... else goto ...
54 Will be transformed into:
57 if (a COND b) goto ... else goto ...
59 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
61 Or (assuming c1 and c2 are constants):
65 if (x EQ/NEQ c2) goto ... else goto ...
67 Will be transformed into:
70 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
72 Similarly for x = a - c1.
78 if (x) goto ... else goto ...
80 Will be transformed into:
83 if (a == 0) goto ... else goto ...
85 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
86 For these cases, we propagate A into all, possibly more than one,
87 COND_EXPRs that use X.
93 if (x) goto ... else goto ...
95 Will be transformed into:
98 if (a != 0) goto ... else goto ...
100 (Assuming a is an integral type and x is a boolean or x is an
101 integral and a is a boolean.)
103 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
104 For these cases, we propagate A into all, possibly more than one,
105 COND_EXPRs that use X.
107 In addition to eliminating the variable and the statement which assigns
108 a value to the variable, we may be able to later thread the jump without
109 adding insane complexity in the dominator optimizer.
111 Also note these transformations can cascade. We handle this by having
112 a worklist of COND_EXPR statements to examine. As we make a change to
113 a statement, we put it back on the worklist to examine on the next
114 iteration of the main loop.
116 A second class of propagation opportunities arises for ADDR_EXPR
129 ptr2 = ptr + <constant>;
133 ptr2 = &x[constant/elementsize];
138 offset = index * element_size;
139 offset_p = (pointer) offset;
140 ptr2 = ptr + offset_p
142 Will get turned into:
146 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
147 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
150 This will (of course) be extended as other needs arise. */
/* Forward declaration: mutual recursion with
   forward_propagate_addr_expr_1 below.  */
152 static bool forward_propagate_addr_expr (tree name, tree rhs);
154 /* Set to true if we delete EH edges during the optimization. */
155 static bool cfg_changed;
158 /* Given an SSA_NAME VAR, return true if and only if VAR is defined by
162 ssa_name_defined_by_comparison_p (tree var)
164 tree def = SSA_NAME_DEF_STMT (var);
/* Only a real assignment can define VAR by a comparison; PHIs, ASMs
   and default definitions are rejected here.  */
166 if (TREE_CODE (def) == GIMPLE_MODIFY_STMT)
168 tree rhs = GIMPLE_STMT_OPERAND (def, 1);
/* True iff the RHS is a tcc_comparison node (==, !=, <, <=, ...).  */
169 return COMPARISON_CLASS_P (rhs);
175 /* Forward propagate a single-use variable into COND once. Return a
176 new condition if successful. Return NULL_TREE otherwise. */
/* NOTE(review): this excerpt is non-contiguous (the embedded original
   line numbers skip), so braces, early returns and some assignments
   are not visible.  Comments below describe only what the visible
   code establishes.  */
179 forward_propagate_into_cond_1 (tree cond, tree *test_var_p)
181 tree new_cond = NULL_TREE;
182 enum tree_code cond_code = TREE_CODE (cond);
183 tree test_var = NULL_TREE;
187 /* If the condition is not a lone variable or an equality test of an
188 SSA_NAME against an integral constant, then we do not have an
191 Note these conditions also ensure the COND_EXPR has no
192 virtual operands or other side effects. */
193 if (cond_code != SSA_NAME
194 && !((cond_code == EQ_EXPR || cond_code == NE_EXPR)
195 && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME
196 && CONSTANT_CLASS_P (TREE_OPERAND (cond, 1))
197 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (cond, 1)))))
200 /* Extract the single variable used in the test into TEST_VAR. */
201 if (cond_code == SSA_NAME)
204 test_var = TREE_OPERAND (cond, 0);
206 /* Now get the defining statement for TEST_VAR. Skip this case if
207 it's not defined by some GIMPLE_MODIFY_STMT. */
208 def = SSA_NAME_DEF_STMT (test_var);
209 if (TREE_CODE (def) != GIMPLE_MODIFY_STMT)
212 def_rhs = GIMPLE_STMT_OPERAND (def, 1);
214 /* If TEST_VAR is set by adding or subtracting a constant
215 from an SSA_NAME, then it is interesting to us as we
216 can adjust the constant in the conditional and thus
217 eliminate the arithmetic operation. */
218 if (TREE_CODE (def_rhs) == PLUS_EXPR
219 || TREE_CODE (def_rhs) == MINUS_EXPR)
221 tree op0 = TREE_OPERAND (def_rhs, 0);
222 tree op1 = TREE_OPERAND (def_rhs, 1);
224 /* The first operand must be an SSA_NAME and the second
225 operand must be a constant. */
226 if (TREE_CODE (op0) != SSA_NAME
227 || !CONSTANT_CLASS_P (op1)
228 || !INTEGRAL_TYPE_P (TREE_TYPE (op1)))
231 /* Don't propagate if the first operand occurs in
233 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op0))
/* Only fold the arithmetic into the condition when TEST_VAR has a
   single use; otherwise its defining statement must stay anyway and
   the transformation would not pay off.  */
236 if (has_single_use (test_var))
238 enum tree_code new_code;
241 /* If the variable was defined via X + C, then we must
242 subtract C from the constant in the conditional.
243 Otherwise we add C to the constant in the
244 conditional. The result must fold into a valid
245 gimple operand to be optimizable. */
246 new_code = (TREE_CODE (def_rhs) == PLUS_EXPR
247 ? MINUS_EXPR : PLUS_EXPR);
248 t = int_const_binop (new_code, TREE_OPERAND (cond, 1), op1, 0);
249 if (!is_gimple_val (t))
252 new_cond = build2 (cond_code, boolean_type_node, op0, t);
256 /* These cases require comparisons of a naked SSA_NAME or
257 comparison of an SSA_NAME against zero or one. */
258 else if (TREE_CODE (cond) == SSA_NAME
259 || integer_zerop (TREE_OPERAND (cond, 1))
260 || integer_onep (TREE_OPERAND (cond, 1)))
262 /* If TEST_VAR is set from a relational operation
263 between two SSA_NAMEs or a combination of an SSA_NAME
264 and a constant, then it is interesting. */
265 if (COMPARISON_CLASS_P (def_rhs))
267 tree op0 = TREE_OPERAND (def_rhs, 0);
268 tree op1 = TREE_OPERAND (def_rhs, 1);
270 /* Both operands of DEF_RHS must be SSA_NAMEs or
272 if ((TREE_CODE (op0) != SSA_NAME
273 && !is_gimple_min_invariant (op0))
274 || (TREE_CODE (op1) != SSA_NAME
275 && !is_gimple_min_invariant (op1)))
278 /* Don't propagate if the first operand occurs in
280 if (TREE_CODE (op0) == SSA_NAME
281 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op0))
284 /* Don't propagate if the second operand occurs in
286 if (TREE_CODE (op1) == SSA_NAME
287 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op1))
290 if (has_single_use (test_var))
292 /* TEST_VAR was set from a relational operator. */
293 new_cond = build2 (TREE_CODE (def_rhs),
294 boolean_type_node, op0, op1);
296 /* Invert the conditional if necessary. */
/* (x == 0) and (x != 1) test that the comparison was FALSE, so the
   propagated comparison must be inverted in those two cases.  */
297 if ((cond_code == EQ_EXPR
298 && integer_zerop (TREE_OPERAND (cond, 1)))
299 || (cond_code == NE_EXPR
300 && integer_onep (TREE_OPERAND (cond, 1))))
302 new_cond = invert_truthvalue (new_cond);
304 /* If we did not get a simple relational
305 expression or bare SSA_NAME, then we can
306 not optimize this case. */
307 if (!COMPARISON_CLASS_P (new_cond)
308 && TREE_CODE (new_cond) != SSA_NAME)
309 new_cond = NULL_TREE;
314 /* If TEST_VAR is set from a TRUTH_NOT_EXPR, then it
316 else if (TREE_CODE (def_rhs) == TRUTH_NOT_EXPR)
318 enum tree_code new_code;
320 def_rhs = TREE_OPERAND (def_rhs, 0);
322 /* DEF_RHS must be an SSA_NAME or constant. */
323 if (TREE_CODE (def_rhs) != SSA_NAME
324 && !is_gimple_min_invariant (def_rhs))
327 /* Don't propagate if the operand occurs in
329 if (TREE_CODE (def_rhs) == SSA_NAME
330 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def_rhs))
/* Testing !x for true maps to x == 0, testing it for false maps to
   x != 0; the assignments of NEW_CODE are in elided lines -- confirm
   against the full source.  */
333 if (cond_code == SSA_NAME
334 || (cond_code == NE_EXPR
335 && integer_zerop (TREE_OPERAND (cond, 1)))
336 || (cond_code == EQ_EXPR
337 && integer_onep (TREE_OPERAND (cond, 1))))
342 new_cond = build2 (new_code, boolean_type_node, def_rhs,
343 fold_convert (TREE_TYPE (def_rhs),
347 /* If TEST_VAR was set from a cast of an integer type
348 to a boolean type or a cast of a boolean to an
349 integral, then it is interesting. */
350 else if (TREE_CODE (def_rhs) == NOP_EXPR
351 || TREE_CODE (def_rhs) == CONVERT_EXPR)
356 outer_type = TREE_TYPE (def_rhs);
357 inner_type = TREE_TYPE (TREE_OPERAND (def_rhs, 0));
359 if ((TREE_CODE (outer_type) == BOOLEAN_TYPE
360 && INTEGRAL_TYPE_P (inner_type))
361 || (TREE_CODE (inner_type) == BOOLEAN_TYPE
362 && INTEGRAL_TYPE_P (outer_type)))
/* Also accept an integral-to-integral cast when the casted name is
   itself defined by a comparison, so its value is known to be 0/1
   and the cast cannot change the truth value.  */
364 else if (INTEGRAL_TYPE_P (outer_type)
365 && INTEGRAL_TYPE_P (inner_type)
366 && TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME
367 && ssa_name_defined_by_comparison_p (TREE_OPERAND (def_rhs,
373 /* Don't propagate if the operand occurs in
375 if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME
376 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND
380 if (has_single_use (test_var))
382 enum tree_code new_code;
385 if (cond_code == SSA_NAME
386 || (cond_code == NE_EXPR
387 && integer_zerop (TREE_OPERAND (cond, 1)))
388 || (cond_code == EQ_EXPR
389 && integer_onep (TREE_OPERAND (cond, 1))))
394 new_arg = TREE_OPERAND (def_rhs, 0);
395 new_cond = build2 (new_code, boolean_type_node, new_arg,
396 fold_convert (TREE_TYPE (new_arg),
/* Report the propagated variable to the caller so it can delete its
   defining statement once the variable becomes unused.  */
402 *test_var_p = test_var;
406 /* COND is a condition of the form:
408 x == const or x != const
410 Look back to x's defining statement and see if x is defined as
414 If const is unchanged if we convert it to type, then we can build
415 the equivalent expression:
418 y == const or y != const
420 Which may allow further optimizations.
422 Return the equivalent comparison or NULL if no such equivalent comparison
426 find_equivalent_equality_comparison (tree cond)
428 tree op0 = TREE_OPERAND (cond, 0);
429 tree op1 = TREE_OPERAND (cond, 1);
430 tree def_stmt = SSA_NAME_DEF_STMT (op0);
/* Look through one level of SSA_NAME copy to reach the statement that
   really computes OP0 (the guard's first clause is in an elided line).  */
433 && TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT
434 && TREE_CODE (GIMPLE_STMT_OPERAND (def_stmt, 1)) == SSA_NAME)
435 def_stmt = SSA_NAME_DEF_STMT (GIMPLE_STMT_OPERAND (def_stmt, 1));
437 /* OP0 might have been a parameter, so first make sure it
438 was defined by a GIMPLE_MODIFY_STMT. */
439 if (def_stmt && TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT)
441 tree def_rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
443 /* If either operand to the comparison is a pointer to
444 a function, then we can not apply this optimization
445 as some targets require function pointers to be
446 canonicalized and in this case this optimization would
447 eliminate a necessary canonicalization. */
448 if ((POINTER_TYPE_P (TREE_TYPE (op0))
449 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) == FUNCTION_TYPE)
450 || (POINTER_TYPE_P (TREE_TYPE (op1))
451 && TREE_CODE (TREE_TYPE (TREE_TYPE (op1))) == FUNCTION_TYPE))
454 /* Now make sure the RHS of the GIMPLE_MODIFY_STMT is a typecast. */
455 if ((TREE_CODE (def_rhs) == NOP_EXPR
456 || TREE_CODE (def_rhs) == CONVERT_EXPR)
457 && TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME)
459 tree def_rhs_inner = TREE_OPERAND (def_rhs, 0);
460 tree def_rhs_inner_type = TREE_TYPE (def_rhs_inner);
/* A narrowing conversion discards bits, so an equality test in the
   wider inner type would not be equivalent; reject it.  */
463 if (TYPE_PRECISION (def_rhs_inner_type)
464 > TYPE_PRECISION (TREE_TYPE (def_rhs)))
467 /* If the inner type of the conversion is a pointer to
468 a function, then we can not apply this optimization
469 as some targets require function pointers to be
470 canonicalized. This optimization would result in
471 canonicalization of the pointer when it was not originally
473 if (POINTER_TYPE_P (def_rhs_inner_type)
474 && TREE_CODE (TREE_TYPE (def_rhs_inner_type)) == FUNCTION_TYPE)
477 /* What we want to prove is that if we convert OP1 to
478 the type of the object inside the NOP_EXPR that the
479 result is still equivalent to SRC.
481 If that is true, the build and return new equivalent
482 condition which uses the source of the typecast and the
483 new constant (which has only changed its type). */
484 new = fold_build1 (TREE_CODE (def_rhs), def_rhs_inner_type, op1);
485 STRIP_USELESS_TYPE_CONVERSION (new);
486 if (is_gimple_val (new) && tree_int_cst_equal (new, op1))
487 return build2 (TREE_CODE (cond), TREE_TYPE (cond),
494 /* EXPR is a COND_EXPR
495 STMT is the statement containing EXPR.
497 This routine attempts to find equivalent forms of the condition
498 which we may be able to optimize better. */
501 simplify_cond (tree cond_expr, tree stmt)
503 tree cond = COND_EXPR_COND (cond_expr);
505 if (COMPARISON_CLASS_P (cond))
507 tree op0 = TREE_OPERAND (cond, 0);
508 tree op1 = TREE_OPERAND (cond, 1);
/* Only SSA_NAME <relop> invariant is handled; other shapes are left
   untouched.  */
510 if (TREE_CODE (op0) == SSA_NAME && is_gimple_min_invariant (op1))
512 /* First see if we have test of an SSA_NAME against a constant
513 where the SSA_NAME is defined by an earlier typecast which
514 is irrelevant when performing tests against the given
516 if (TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
518 tree new_cond = find_equivalent_equality_comparison (cond);
/* Install the equivalent condition in place (the NULL check guarding
   this assignment is in an elided line of this excerpt).  */
522 COND_EXPR_COND (cond_expr) = new_cond;
530 /* Forward propagate a single-use variable into COND_EXPR as many
531 times as possible. */
534 forward_propagate_into_cond (tree cond_expr, tree stmt)
536 gcc_assert (TREE_CODE (cond_expr) == COND_EXPR);
/* The body below sits inside a loop (elided here) so that cascading
   propagations are retried until _1 fails.  */
540 tree test_var = NULL_TREE;
541 tree cond = COND_EXPR_COND (cond_expr);
542 tree new_cond = forward_propagate_into_cond_1 (cond, &test_var);
544 /* Return if unsuccessful. */
545 if (new_cond == NULL_TREE)
549 if (dump_file && (dump_flags & TDF_DETAILS))
551 fprintf (dump_file, " Replaced '");
552 print_generic_expr (dump_file, cond, dump_flags);
553 fprintf (dump_file, "' with '");
554 print_generic_expr (dump_file, new_cond, dump_flags);
555 fprintf (dump_file, "'\n");
558 COND_EXPR_COND (cond_expr) = new_cond;
/* If the variable we propagated away is now completely dead, remove
   its defining statement.  */
561 if (has_zero_uses (test_var))
563 tree def = SSA_NAME_DEF_STMT (test_var);
564 block_stmt_iterator bsi = bsi_for_stmt (def);
565 bsi_remove (&bsi, true);
570 /* There are further simplifications that can be performed
571 on COND_EXPRs. Specifically, when comparing an SSA_NAME
572 against a constant where the SSA_NAME is the result of a
573 conversion. Perhaps this should be folded into the rest
574 of the COND_EXPR simplification code. */
575 simplify_cond (cond_expr, stmt);
578 /* We've just substituted an ADDR_EXPR into stmt. Update all the
579 relevant data structures to match. */
582 tidy_after_forward_propagate_addr (tree stmt)
584 /* We may have turned a trapping insn into a non-trapping insn. */
/* Purging dead EH edges alters the CFG; the elided body of this `if'
   presumably sets CFG_CHANGED -- confirm against the full source.  */
585 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
586 && tree_purge_dead_eh_edges (bb_for_stmt (stmt)))
/* Keep TREE_INVARIANT/TREE_CONSTANT bits of a substituted ADDR_EXPR
   consistent with its new operands.  */
589 if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR)
590 recompute_tree_invariant_for_addr_expr (GIMPLE_STMT_OPERAND (stmt, 1));
592 mark_symbols_for_renaming (stmt);
595 /* DEF_RHS defines LHS which contains the address of the 0th element
596 in an array. USE_STMT uses LHS to compute the address of an
597 arbitrary element within the array. The (variable) byte offset
598 of the element is contained in OFFSET.
600 We walk back through the use-def chains of OFFSET to verify that
601 it is indeed computing the offset of an element within the array
602 and extract the index corresponding to the given byte offset.
604 We then try to fold the entire address expression into a form
607 If we are successful, we replace the right hand side of USE_STMT
608 with the new address computation. */
611 forward_propagate_addr_into_variable_array_index (tree offset, tree lhs,
612 tree def_rhs, tree use_stmt)
616 /* The offset must be defined by a simple GIMPLE_MODIFY_STMT statement. */
617 if (TREE_CODE (offset) != GIMPLE_MODIFY_STMT)
620 /* The RHS of the statement which defines OFFSET must be a gimple
621 cast of another SSA_NAME. */
622 offset = GIMPLE_STMT_OPERAND (offset, 1);
623 if (!is_gimple_cast (offset))
626 offset = TREE_OPERAND (offset, 0);
627 if (TREE_CODE (offset) != SSA_NAME)
630 /* Get the defining statement of the offset before type
632 offset = SSA_NAME_DEF_STMT (offset);
634 /* The statement which defines OFFSET before type conversion
635 must be a simple GIMPLE_MODIFY_STMT. */
636 if (TREE_CODE (offset) != GIMPLE_MODIFY_STMT)
639 /* The RHS of the statement which defines OFFSET must be a
640 multiplication of an object by the size of the array elements.
641 This implicitly verifies that the size of the array elements
643 offset = GIMPLE_STMT_OPERAND (offset, 1);
644 if (TREE_CODE (offset) != MULT_EXPR
645 || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
646 || !simple_cst_equal (TREE_OPERAND (offset, 1),
647 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (lhs)))))
650 /* The first operand to the MULT_EXPR is the desired index. */
651 index = TREE_OPERAND (offset, 0);
653 /* Replace the pointer addition with array indexing. */
654 GIMPLE_STMT_OPERAND (use_stmt, 1) = unshare_expr (def_rhs);
/* Overwrite the index operand of the ARRAY_REF inside the unshared
   ADDR_EXPR; the assigned value (presumably INDEX) is on an elided
   line -- confirm against the full source.  */
655 TREE_OPERAND (TREE_OPERAND (GIMPLE_STMT_OPERAND (use_stmt, 1), 0), 1)
658 /* That should have created gimple, so there is no need to
659 record information to undo the propagation. */
660 fold_stmt_inplace (use_stmt);
661 tidy_after_forward_propagate_addr (use_stmt);
665 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
666 ADDR_EXPR <whatever>.
668 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
669 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
670 node or for recovery of array indexing from pointer arithmetic.
672 Return true if the propagation was successful (the propagation can
673 be not totally successful, yet things may have been changed). */
/* NOTE(review): excerpt is non-contiguous; several returns and guard
   clauses of this function are not visible here.  */
676 forward_propagate_addr_expr_1 (tree name, tree def_rhs, tree use_stmt)
678 tree lhs, rhs, array_ref;
680 /* Strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
681 ADDR_EXPR will not appear on the LHS. */
682 lhs = GIMPLE_STMT_OPERAND (use_stmt, 0);
683 while (handled_component_p (lhs))
684 lhs = TREE_OPERAND (lhs, 0);
686 rhs = GIMPLE_STMT_OPERAND (use_stmt, 1);
688 /* Now see if the LHS node is an INDIRECT_REF using NAME. If so,
689 propagate the ADDR_EXPR into the use of NAME and fold the result. */
690 if (TREE_CODE (lhs) == INDIRECT_REF && TREE_OPERAND (lhs, 0) == name)
692 /* This should always succeed in creating gimple, so there is
693 no need to save enough state to undo this propagation. */
694 TREE_OPERAND (lhs, 0) = unshare_expr (def_rhs);
695 fold_stmt_inplace (use_stmt);
696 tidy_after_forward_propagate_addr (use_stmt);
698 /* Continue propagating into the RHS. */
701 /* Trivial case. The use statement could be a trivial copy or a
702 useless conversion. Recurse to the uses of the lhs as copyprop does
703 not copy through different variant pointers and FRE does not catch
704 all useless conversions. */
705 else if ((TREE_CODE (lhs) == SSA_NAME
707 || ((TREE_CODE (rhs) == NOP_EXPR
708 || TREE_CODE (rhs) == CONVERT_EXPR)
709 && tree_ssa_useless_type_conversion_1 (TREE_TYPE (rhs),
710 TREE_TYPE (def_rhs))))
711 return forward_propagate_addr_expr (lhs, def_rhs);
713 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
714 nodes from the RHS. */
715 while (handled_component_p (rhs)
716 || TREE_CODE (rhs) == ADDR_EXPR)
717 rhs = TREE_OPERAND (rhs, 0);
719 /* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
720 propagate the ADDR_EXPR into the use of NAME and fold the result. */
721 if (TREE_CODE (rhs) == INDIRECT_REF && TREE_OPERAND (rhs, 0) == name)
723 /* This should always succeed in creating gimple, so there is
724 no need to save enough state to undo this propagation. */
725 TREE_OPERAND (rhs, 0) = unshare_expr (def_rhs);
726 fold_stmt_inplace (use_stmt);
727 tidy_after_forward_propagate_addr (use_stmt);
731 /* The remaining cases are all for turning pointer arithmetic into
732 array indexing. They only apply when we have the address of
733 element zero in an array. If that is not the case then there
735 array_ref = TREE_OPERAND (def_rhs, 0);
736 if (TREE_CODE (array_ref) != ARRAY_REF
737 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
738 || !integer_zerop (TREE_OPERAND (array_ref, 1)))
741 /* If the use of the ADDR_EXPR must be a PLUS_EXPR, or else there
743 if (TREE_CODE (rhs) != PLUS_EXPR)
746 /* Try to optimize &x[0] + C where C is a multiple of the size
747 of the elements in X into &x[C/element size]. */
748 if (TREE_OPERAND (rhs, 0) == name
749 && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
/* Keep an unshared copy of the original RHS so the statement can be
   restored verbatim if folding fails.  */
751 tree orig = unshare_expr (rhs);
752 TREE_OPERAND (rhs, 0) = unshare_expr (def_rhs);
754 /* If folding succeeds, then we have just exposed new variables
755 in USE_STMT which will need to be renamed. If folding fails,
756 then we need to put everything back the way it was. */
757 if (fold_stmt_inplace (use_stmt))
759 tidy_after_forward_propagate_addr (use_stmt);
764 GIMPLE_STMT_OPERAND (use_stmt, 1) = orig;
765 update_stmt (use_stmt);
770 /* Try to optimize &x[0] + OFFSET where OFFSET is defined by
771 converting a multiplication of an index by the size of the
772 array elements, then the result is converted into the proper
773 type for the arithmetic. */
774 if (TREE_OPERAND (rhs, 0) == name
775 && TREE_CODE (TREE_OPERAND (rhs, 1)) == SSA_NAME
776 /* Avoid problems with IVopts creating PLUS_EXPRs with a
777 different type than their operands. */
778 && lang_hooks.types_compatible_p (TREE_TYPE (name), TREE_TYPE (rhs)))
781 tree offset_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 1));
783 res = forward_propagate_addr_into_variable_array_index (offset_stmt, lhs,
788 /* Same as the previous case, except the operands of the PLUS_EXPR
790 if (TREE_OPERAND (rhs, 1) == name
791 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
792 /* Avoid problems with IVopts creating PLUS_EXPRs with a
793 different type than their operands. */
794 && lang_hooks.types_compatible_p (TREE_TYPE (name), TREE_TYPE (rhs)))
797 tree offset_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
798 res = forward_propagate_addr_into_variable_array_index (offset_stmt, lhs,
805 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
807 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
808 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
809 node or for recovery of array indexing from pointer arithmetic.
810 Returns true, if all uses have been propagated into. */
813 forward_propagate_addr_expr (tree name, tree rhs)
815 int stmt_loop_depth = bb_for_stmt (SSA_NAME_DEF_STMT (name))->loop_depth;
816 imm_use_iterator iter;
820 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
824 /* If the use is not in a simple assignment statement, then
825 there is nothing we can do. */
826 if (TREE_CODE (use_stmt) != GIMPLE_MODIFY_STMT)
832 /* If the use is in a deeper loop nest, then we do not want
833 to propagate the ADDR_EXPR into the loop as that is likely
834 adding expression evaluations into the loop. */
835 if (bb_for_stmt (use_stmt)->loop_depth > stmt_loop_depth)
/* Bracket the actual propagation with push/pop so operand caches are
   kept consistent with the modified statement.  */
841 push_stmt_changes (&use_stmt);
843 result = forward_propagate_addr_expr_1 (name, rhs, use_stmt);
846 pop_stmt_changes (&use_stmt);
848 /* Remove intermediate now unused copy and conversion chains. */
850 && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME
851 && (TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == SSA_NAME
852 || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == NOP_EXPR
853 || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == CONVERT_EXPR))
855 block_stmt_iterator bsi = bsi_for_stmt (use_stmt);
856 release_defs (use_stmt);
857 bsi_remove (&bsi, true);
864 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
865 If so, we can change STMT into lhs = y which can later be copy
866 propagated. Similarly for negation.
868 This could trivially be formulated as a forward propagation
869 to immediate uses. However, we already had an implementation
870 from DOM which used backward propagation via the use-def links.
872 It turns out that backward propagation is actually faster as
873 there's less work to do for each NOT/NEG expression we find.
874 Backwards propagation needs to look at the statement in a single
875 backlink. Forward propagation needs to look at potentially more
876 than one forward link. */
879 simplify_not_neg_expr (tree stmt)
881 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
882 tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
884 /* See if the RHS_DEF_STMT has the same form as our statement. */
885 if (TREE_CODE (rhs_def_stmt) == GIMPLE_MODIFY_STMT
886 && TREE_CODE (GIMPLE_STMT_OPERAND (rhs_def_stmt, 1)) == TREE_CODE (rhs))
888 tree rhs_def_operand =
889 TREE_OPERAND (GIMPLE_STMT_OPERAND (rhs_def_stmt, 1), 0);
891 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
892 if (TREE_CODE (rhs_def_operand) == SSA_NAME
893 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
/* ~~y == y and -(-y) == y: collapse the double application to y.  */
895 GIMPLE_STMT_OPERAND (stmt, 1) = rhs_def_operand;
901 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
902 the condition which we may be able to optimize better. */
905 simplify_switch_expr (tree stmt)
907 tree cond = SWITCH_COND (stmt);
910 /* The optimization that we really care about is removing unnecessary
911 casts. That will let us do much better in propagating the inferred
912 constant at the switch target. */
913 if (TREE_CODE (cond) == SSA_NAME)
915 def = SSA_NAME_DEF_STMT (cond);
916 if (TREE_CODE (def) == GIMPLE_MODIFY_STMT)
918 def = GIMPLE_STMT_OPERAND (def, 1);
919 if (TREE_CODE (def) == NOP_EXPR)
924 def = TREE_OPERAND (def, 0);
926 #ifdef ENABLE_CHECKING
927 /* ??? Why was Jeff testing this? We are gimple... */
928 gcc_assert (is_gimple_val (def));
931 to = TREE_TYPE (cond);
932 ti = TREE_TYPE (def);
934 /* If we have an extension that preserves value, then we
935 can copy the source value into the switch. */
937 need_precision = TYPE_PRECISION (ti);
/* A signedness mismatch needs one extra bit of precision to remain
   value-preserving; the adjustments themselves are on elided lines --
   confirm against the full source.  */
939 if (! INTEGRAL_TYPE_P (ti))
941 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
943 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
945 if (TYPE_PRECISION (to) < need_precision)
/* The cast is value-preserving: switch directly on the source.  */
950 SWITCH_COND (stmt) = def;
958 /* Main entry point for the forward propagation optimizer. */
961 tree_ssa_forward_propagate_single_use_vars (void)
964 unsigned int todoflags = 0;
/* Iterate over every statement of every block (the FOR_EACH_BB line
   is elided in this excerpt), dispatching on statement shape.  */
970 block_stmt_iterator bsi;
972 /* Note we update BSI within the loop as necessary. */
973 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
975 tree stmt = bsi_stmt (bsi);
977 /* If this statement sets an SSA_NAME to an address,
978 try to propagate the address into the uses of the SSA_NAME. */
979 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
981 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
982 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
985 if (TREE_CODE (lhs) != SSA_NAME)
991 if (TREE_CODE (rhs) == ADDR_EXPR)
993 if (forward_propagate_addr_expr (lhs, rhs))
/* All uses were propagated into; the address statement is dead.  */
996 todoflags |= TODO_remove_unused_locals;
997 bsi_remove (&bsi, true);
1002 else if ((TREE_CODE (rhs) == BIT_NOT_EXPR
1003 || TREE_CODE (rhs) == NEGATE_EXPR)
1004 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
1006 simplify_not_neg_expr (stmt);
1009 else if (TREE_CODE (rhs) == COND_EXPR)
1011 forward_propagate_into_cond (rhs, stmt);
1017 else if (TREE_CODE (stmt) == SWITCH_EXPR)
1019 simplify_switch_expr (stmt);
1022 else if (TREE_CODE (stmt) == COND_EXPR)
1024 forward_propagate_into_cond (stmt, stmt);
/* CFG_CHANGED is set when EH edges were purged; request a CFG
   cleanup from the pass manager in that case.  */
1033 todoflags |= TODO_cleanup_cfg;
/* Gate function: decides whether the forwprop pass runs at all.  */
1039 gate_forwprop (void)
/* Pass descriptor registered with the tree pass manager.  */
1044 struct tree_opt_pass pass_forwprop = {
1045 "forwprop", /* name */
1046 gate_forwprop, /* gate */
1047 tree_ssa_forward_propagate_single_use_vars, /* execute */
1050 0, /* static_pass_number */
1051 TV_TREE_FORWPROP, /* tv_id */
1052 PROP_cfg | PROP_ssa, /* properties_required */
1053 0, /* properties_provided */
1054 0, /* properties_destroyed */
1055 0, /* todo_flags_start */
1059 | TODO_verify_ssa, /* todo_flags_finish */
1064 /* Structure to keep track of the value of a dereferenced PHI result
1065 and the set of virtual operands used for that dereference. */
1073 /* Verify if the value recorded for NAME in PHIVN is still valid at
1074 the start of basic block BB. */
1077 phivn_valid_p (struct phiprop_d *phivn, tree name, basic_block bb)
1079 tree vop_stmt = phivn[SSA_NAME_VERSION (name)].vop_stmt;
1083 /* The def stmts of all virtual uses need to be post-dominated
1085 FOR_EACH_SSA_TREE_OPERAND (vuse, vop_stmt, ui, SSA_OP_VUSE)
1088 imm_use_iterator ui2;
/* Scan every use of this virtual operand for an intervening store.  */
1091 FOR_EACH_IMM_USE_STMT (use_stmt, ui2, vuse)
1093 /* If BB does not dominate a VDEF, the value is invalid. */
1094 if (((TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
1095 && !ZERO_SSA_OPERANDS (use_stmt, SSA_OP_VDEF))
1096 || TREE_CODE (use_stmt) == PHI_NODE)
1097 && !dominated_by_p (CDI_DOMINATORS, bb_for_stmt (use_stmt), bb))
/* Found a clobber not dominated by BB: stop scanning this operand.  */
1100 BREAK_FROM_IMM_USE_STMT (ui2);
1110 /* Insert a new phi node for the dereference of PHI at basic_block
1111 BB with the virtual operands from USE_STMT. */
1114 phiprop_insert_phi (basic_block bb, tree phi, tree use_stmt,
1115 struct phiprop_d *phivn, size_t n)
1121 /* Build a new PHI node to replace the definition of
1122 the indirect reference lhs. */
1123 res = GIMPLE_STMT_OPERAND (use_stmt, 0);
1124 SSA_NAME_DEF_STMT (res) = new_phi = create_phi_node (res, bb);
1126 /* Add PHI arguments for each edge inserting loads of the
1127 addressable operands. */
1128 FOR_EACH_EDGE (e, ei, bb->preds)
1130 tree old_arg, new_var, tmp;
1132 old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
/* Walk copy chains back until we hit either a non-SSA_NAME (the
   ADDR_EXPR case) or a name PHIVN already records a value for.  */
1133 while (TREE_CODE (old_arg) == SSA_NAME
1134 && (SSA_NAME_VERSION (old_arg) >= n
1135 || phivn[SSA_NAME_VERSION (old_arg)].value == NULL_TREE))
1137 tree def_stmt = SSA_NAME_DEF_STMT (old_arg);
1138 old_arg = GIMPLE_STMT_OPERAND (def_stmt, 1);
1141 if (TREE_CODE (old_arg) == SSA_NAME)
1142 /* Reuse a formerly created dereference. */
1143 new_var = phivn[SSA_NAME_VERSION (old_arg)].value;
/* Otherwise materialize a fresh load of the addressed object on this
   predecessor edge.  */
1146 old_arg = TREE_OPERAND (old_arg, 0);
1147 new_var = create_tmp_var (TREE_TYPE (old_arg), NULL);
1148 tmp = build2 (GIMPLE_MODIFY_STMT, void_type_node,
1149 NULL_TREE, unshare_expr (old_arg));
/* Complex/vector temporaries must be marked as GIMPLE registers to
   stay in SSA form.  */
1150 if (TREE_CODE (TREE_TYPE (old_arg)) == COMPLEX_TYPE
1151 || TREE_CODE (TREE_TYPE (old_arg)) == VECTOR_TYPE)
1152 DECL_GIMPLE_REG_P (new_var) = 1;
1153 add_referenced_var (new_var);
1154 new_var = make_ssa_name (new_var, tmp);
1155 GIMPLE_STMT_OPERAND (tmp, 0) = new_var;
/* The load is queued on the edge; the caller must commit edge
   insertions afterwards.  */
1157 bsi_insert_on_edge (e, tmp);
1160 mark_symbols_for_renaming (tmp);
1163 add_phi_arg (new_phi, new_var, e);
1166 update_stmt (new_phi);
1171 /* Propagate between the phi node arguments of PHI in BB and phi result
1172 users. For now this matches
1173 # p_2 = PHI <&x, &y>
1180 Returns true if a transformation was done and edge insertions
1181 need to be committed. Global data PHIVN and N is used to track
1182 past transformation results. We need to be especially careful here
1183 with aliasing issues as we are moving memory reads. */
1186 propagate_with_phi (basic_block bb, tree phi, struct phiprop_d *phivn, size_t n)
1188 tree ptr = PHI_RESULT (phi);
1189 tree use_stmt, res = NULL_TREE;
1190 block_stmt_iterator bsi;
1191 imm_use_iterator ui;
1192 use_operand_p arg_p, use;
/* Only handle a phi result that is a non-aliased pointer to a gimple
   register type: we are going to replace a load through it, so the
   pointed-to value must be representable as an SSA register.  */
1196 if (MTAG_P (SSA_NAME_VAR (ptr))
1197 || !POINTER_TYPE_P (TREE_TYPE (ptr))
1198 || !is_gimple_reg_type (TREE_TYPE (TREE_TYPE (ptr))))
1201 /* Check if we can "cheaply" dereference all phi arguments. */
1202 FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_USE)
1204 tree arg = USE_FROM_PTR (arg_p);
1205 /* Walk the ssa chain until we reach a ssa name we already
1206 created a value for or we reach a definition of the form
1207 ssa_name_n = &var; */
1208 while (TREE_CODE (arg) == SSA_NAME
1209 && !SSA_NAME_IS_DEFAULT_DEF (arg)
/* Names with version >= N post-date the PHIVN table; they cannot
   have a recorded value, so keep walking their definitions.  */
1210 && (SSA_NAME_VERSION (arg) >= n
1211 || phivn[SSA_NAME_VERSION (arg)].value == NULL_TREE))
1213 tree def_stmt = SSA_NAME_DEF_STMT (arg);
1214 if (TREE_CODE (def_stmt) != GIMPLE_MODIFY_STMT)
1216 arg = GIMPLE_STMT_OPERAND (def_stmt, 1);
/* The argument must either be an ADDR_EXPR of a register-typed
   object, or an SSA name for which a still-valid dereference value
   was recorded by an earlier transformation.  */
1218 if ((TREE_CODE (arg) != ADDR_EXPR
1219 /* Avoid having to decay *&a to a[0] later. */
1220 || !is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (arg, 0))))
1221 && !(TREE_CODE (arg) == SSA_NAME
1222 && phivn[SSA_NAME_VERSION (arg)].value != NULL_TREE
1223 && phivn_valid_p (phivn, arg, bb)))
1227 /* Find a dereferencing use. First follow (single use) ssa
1228 copy chains for ptr. */
1229 while (single_imm_use (ptr, &use, &use_stmt)
1230 && TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
1231 && GIMPLE_STMT_OPERAND (use_stmt, 1) == ptr
1232 && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME)
1233 ptr = GIMPLE_STMT_OPERAND (use_stmt, 0);
1235 /* Replace the first dereference of *ptr if there is one and if we
1236 can move the loads to the place of the ptr phi node. */
1237 phi_inserted = false;
1238 FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
1243 /* Check whether this is a load of *ptr. */
1244 if (!(TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
1245 && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME
1246 && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == INDIRECT_REF
1247 && TREE_OPERAND (GIMPLE_STMT_OPERAND (use_stmt, 1), 0) == ptr
1248 /* We cannot replace a load that may throw or is volatile. */
/* NOTE(review): only the throw check is visible here; the volatile
   check mentioned above must be on an elided line — confirm.  */
1249 && !tree_can_throw_internal (use_stmt)))
1252 /* Check if we can move the loads. The def stmts of all virtual uses
1253 need to be post-dominated by bb. */
1254 FOR_EACH_SSA_TREE_OPERAND (vuse, use_stmt, ui2, SSA_OP_VUSE)
1256 tree def_stmt = SSA_NAME_DEF_STMT (vuse);
/* Reject the move if a virtual-use definition is in BB itself or in
   a block not dominating BB — hoisting the load past it would read
   memory before it is written.  */
1257 if (!SSA_NAME_IS_DEFAULT_DEF (vuse)
1258 && (bb_for_stmt (def_stmt) == bb
1259 || !dominated_by_p (CDI_DOMINATORS,
1260 bb, bb_for_stmt (def_stmt))))
1264 /* Found a proper dereference. Insert a phi node if this
1265 is the first load transformation. */
1268 res = phiprop_insert_phi (bb, phi, use_stmt, phivn, n);
1270 /* Remember the value we created for *ptr. */
1271 phivn[SSA_NAME_VERSION (ptr)].value = res;
1272 phivn[SSA_NAME_VERSION (ptr)].vop_stmt = use_stmt;
1274 /* Remove old stmt. The phi is taken care of by DCE, if we
1275 want to delete it here we also have to delete all intermediate
1277 bsi = bsi_for_stmt (use_stmt);
1278 bsi_remove (&bsi, 0);
1280 phi_inserted = true;
1284 /* Further replacements are easy, just make a copy out of the
/* RES was set when the first dereference inserted the phi; later
   loads of *ptr simply copy that phi result.  */
1286 GIMPLE_STMT_OPERAND (use_stmt, 1) = res;
1287 update_stmt (use_stmt);
1291 /* Continue searching for a proper dereference. */
1294 return phi_inserted;
1297 /* Helper walking the dominator tree starting from BB and processing
1298 phi nodes with global data PHIVN and N. */
1301 tree_ssa_phiprop_1 (basic_block bb, struct phiprop_d *phivn, size_t n)
1303 bool did_something = false;
/* Try the transformation on every phi node of this block.  */
1307 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
1308 did_something |= propagate_with_phi (bb, phi, phivn, n);
/* Recurse into all children of BB in the dominator tree, so that
   values recorded in PHIVN for BB are valid in the children.  */
1310 for (son = first_dom_son (CDI_DOMINATORS, bb);
1312 son = next_dom_son (CDI_DOMINATORS, son))
1313 did_something |= tree_ssa_phiprop_1 (son, phivn, n);
/* True if any phi in BB or a dominated block was transformed.  */
1315 return did_something;
1318 /* Main entry for phiprop pass. */
1321 tree_ssa_phiprop (void)
1323 struct phiprop_d *phivn;
/* The pass walks the dominator tree; make sure the info is current.  */
1325 calculate_dominance_info (CDI_DOMINATORS);
/* One zero-initialized entry per existing SSA name; names created
   during the pass have version >= num_ssa_names and are not tracked.  */
1327 phivn = XCNEWVEC (struct phiprop_d, num_ssa_names);
/* Edge insertions made by phiprop_insert_phi are only committed if
   some transformation actually happened.  */
1329 if (tree_ssa_phiprop_1 (ENTRY_BLOCK_PTR, phivn, num_ssa_names))
1330 bsi_commit_edge_inserts ();
/* NOTE(review): the matching free (phivn) is not visible in this
   chunk — presumably on an elided line; verify to avoid a leak.  */
1343 struct tree_opt_pass pass_phiprop = {
1344 "phiprop", /* name */
1345 gate_phiprop, /* gate */
1346 tree_ssa_phiprop, /* execute */
1349 0, /* static_pass_number */
1350 TV_TREE_FORWPROP, /* tv_id */
1351 PROP_cfg | PROP_ssa, /* properties_required */
1352 0, /* properties_provided */
1353 0, /* properties_destroyed */
1354 0, /* todo_flags_start */
1358 | TODO_verify_ssa, /* todo_flags_finish */